Implement workspaces in Cargo
author Alex Crichton <alex@alexcrichton.com>
Sun, 15 May 2016 00:14:24 +0000 (17:14 -0700)
committer Alex Crichton <alex@alexcrichton.com>
Tue, 5 Jul 2016 22:42:22 +0000 (15:42 -0700)
This commit is an implementation of [RFC 1525] which specifies the addition of
**workspaces** to Cargo.

[RFC 1525]: https://github.com/rust-lang/rfcs/blob/master/text/1525-cargo-workspace.md

A workspace is a group of crates which are all compiled into the same output
directory and share the same `Cargo.lock` file. This means that dependencies are
cached between builds and that all members resolve to the same versions of their
shared dependencies. An update to any one dependency transitively affects all
other members of the workspace.

Typical repository layouts with a crate at the root and a number of path
dependencies simply need to add the following to the root `Cargo.toml`:

```toml
[workspace]
```

Otherwise more advanced configuration may be necessary through the
`package.workspace` or `workspace.members` keys. More information can be found
as part of [RFC 1525].

48 files changed:
src/bin/bench.rs
src/bin/build.rs
src/bin/clean.rs
src/bin/doc.rs
src/bin/fetch.rs
src/bin/generate_lockfile.rs
src/bin/metadata.rs
src/bin/package.rs
src/bin/pkgid.rs
src/bin/publish.rs
src/bin/run.rs
src/bin/rustc.rs
src/bin/rustdoc.rs
src/bin/test.rs
src/bin/update.rs
src/cargo/core/manifest.rs
src/cargo/core/mod.rs
src/cargo/core/resolver/encode.rs
src/cargo/core/resolver/mod.rs
src/cargo/core/workspace.rs [new file with mode: 0644]
src/cargo/ops/cargo_clean.rs
src/cargo/ops/cargo_compile.rs
src/cargo/ops/cargo_doc.rs
src/cargo/ops/cargo_fetch.rs
src/cargo/ops/cargo_generate_lockfile.rs
src/cargo/ops/cargo_install.rs
src/cargo/ops/cargo_new.rs
src/cargo/ops/cargo_output_metadata.rs
src/cargo/ops/cargo_package.rs
src/cargo/ops/cargo_pkgid.rs
src/cargo/ops/cargo_read_manifest.rs
src/cargo/ops/cargo_run.rs
src/cargo/ops/cargo_rustc/layout.rs
src/cargo/ops/cargo_rustc/mod.rs
src/cargo/ops/cargo_test.rs
src/cargo/ops/lockfile.rs
src/cargo/ops/mod.rs
src/cargo/ops/registry.rs
src/cargo/ops/resolve.rs
src/cargo/sources/git/source.rs
src/cargo/util/config.rs
src/cargo/util/toml.rs
src/doc/manifest.md
src/rustversion.txt
tests/cargotest/support/mod.rs
tests/cross-compile.rs
tests/resolve.rs
tests/workspaces.rs [new file with mode: 0644]

index dc0a1a4daaab301204d6ad00b253c70926a434db..3db26741f4e3aa074c6ce938d9267002177cb53f 100644 (file)
@@ -1,3 +1,4 @@
+use cargo::core::Workspace;
 use cargo::ops;
 use cargo::util::{CliResult, CliError, Human, Config, human};
 use cargo::util::important_paths::{find_root_manifest_for_wd};
@@ -91,7 +92,8 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
         },
     };
 
-    let err = try!(ops::run_benches(&root, &ops, &options.arg_args));
+    let ws = try!(Workspace::new(&root, config));
+    let err = try!(ops::run_benches(&ws, &ops, &options.arg_args));
     match err {
         None => Ok(None),
         Some(err) => {
index 8a106436a9df90c250a428f7388ab6618674b5d5..3fc0fe32e6cf52b748a187ad424937cff72e5626 100644 (file)
@@ -1,5 +1,6 @@
 use std::env;
 
+use cargo::core::Workspace;
 use cargo::ops::CompileOptions;
 use cargo::ops;
 use cargo::util::important_paths::{find_root_manifest_for_wd};
@@ -86,6 +87,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
         target_rustc_args: None,
     };
 
-    try!(ops::compile(&root, &opts));
+    let ws = try!(Workspace::new(&root, config));
+    try!(ops::compile(&ws, &opts));
     Ok(None)
 }
index af3968cf816f4af1f1cea6dceed9db9ed2c6a7d1..36ec7db1771edb59466bd1c7d75de7d0fb980973 100644 (file)
@@ -1,5 +1,6 @@
 use std::env;
 
+use cargo::core::Workspace;
 use cargo::ops;
 use cargo::util::{CliResult, Config};
 use cargo::util::important_paths::{find_root_manifest_for_wd};
@@ -50,6 +51,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
         target: options.flag_target.as_ref().map(|s| &s[..]),
         release: options.flag_release,
     };
-    try!(ops::clean(&root, &opts));
+    let ws = try!(Workspace::new(&root, config));
+    try!(ops::clean(&ws, &opts));
     Ok(None)
 }
index b7804dbd3ca77dcd8ea598932c536d7e75d01f0a..1c420def853ba89848e6e40d20fa66796f8d6ece 100644 (file)
@@ -1,3 +1,4 @@
+use cargo::core::Workspace;
 use cargo::ops;
 use cargo::util::{CliResult, Config};
 use cargo::util::important_paths::{find_root_manifest_for_wd};
@@ -84,6 +85,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
         },
     };
 
-    try!(ops::doc(&root, &doc_opts));
+    let ws = try!(Workspace::new(&root, config));
+    try!(ops::doc(&ws, &doc_opts));
     Ok(None)
 }
index 0d72fff44b528c1805a9a80ad76c005dd1b6cc9b..2870dc63a88378b8b4b8d40a7c9e4e462811020d 100644 (file)
@@ -1,3 +1,4 @@
+use cargo::core::Workspace;
 use cargo::ops;
 use cargo::util::{CliResult, Config};
 use cargo::util::important_paths::find_root_manifest_for_wd;
@@ -38,7 +39,8 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
                                 options.flag_quiet,
                                 &options.flag_color));
     let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd()));
-    try!(ops::fetch(&root, config));
+    let ws = try!(Workspace::new(&root, config));
+    try!(ops::fetch(&ws));
     Ok(None)
 }
 
index 83b44d4eeeffa8a18bb2514b12181a0675fcec3f..a49930fd9ab1960aa70fb15042b2b73a859adf6f 100644 (file)
@@ -1,5 +1,6 @@
 use std::env;
 
+use cargo::core::Workspace;
 use cargo::ops;
 use cargo::util::{CliResult, Config};
 use cargo::util::important_paths::find_root_manifest_for_wd;
@@ -33,6 +34,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
                                 &options.flag_color));
     let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd()));
 
-    try!(ops::generate_lockfile(&root, config));
+    let ws = try!(Workspace::new(&root, config));
+    try!(ops::generate_lockfile(&ws));
     Ok(None)
 }
index b959be2c0e31a4700c349f8c9710f32c28ccbeea..d8a9f1fe5db91cecf7eb1215ec60a4d9802efb12 100644 (file)
@@ -1,8 +1,4 @@
-extern crate cargo;
-extern crate docopt;
-extern crate rustc_serialize;
-extern crate toml;
-
+use cargo::core::Workspace;
 use cargo::ops::{output_metadata, OutputMetadataOptions, ExportInfo};
 use cargo::util::important_paths::find_root_manifest_for_wd;
 use cargo::util::{CliResult, Config};
@@ -48,12 +44,12 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<ExportInfo
 
     let options = OutputMetadataOptions {
         features: options.flag_features,
-        manifest_path: &manifest,
         no_default_features: options.flag_no_default_features,
         no_deps: options.flag_no_deps,
         version: options.flag_format_version,
     };
 
-    let result = try!(output_metadata(options, config));
+    let ws = try!(Workspace::new(&manifest, config));
+    let result = try!(output_metadata(&ws, &options));
     Ok(Some(result))
 }
index 0336331a21bc74a90784988bd9bd1e8829bbf1d4..dda718373dbc71bf66fc64942b201877d9b011fb 100644 (file)
@@ -1,3 +1,4 @@
+use cargo::core::Workspace;
 use cargo::ops;
 use cargo::util::{CliResult, Config};
 use cargo::util::important_paths::find_root_manifest_for_wd;
@@ -38,7 +39,8 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
                                 options.flag_quiet,
                                 &options.flag_color));
     let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd()));
-    try!(ops::package(&root, &ops::PackageOpts {
+    let ws = try!(Workspace::new(&root, config));
+    try!(ops::package(&ws, &ops::PackageOpts {
         config: config,
         verify: !options.flag_no_verify,
         list: options.flag_list,
index beff81e3db9e66b76190bf302665d83b37720a65..963caf11f046fb72fcc78fcc9d537e812f333eef 100644 (file)
@@ -1,3 +1,4 @@
+use cargo::core::Workspace;
 use cargo::ops;
 use cargo::util::{CliResult, Config};
 use cargo::util::important_paths::{find_root_manifest_for_wd};
@@ -51,9 +52,10 @@ pub fn execute(options: Options,
                                 options.flag_quiet,
                                 &options.flag_color));
     let root = try!(find_root_manifest_for_wd(options.flag_manifest_path.clone(), config.cwd()));
+    let ws = try!(Workspace::new(&root, config));
 
     let spec = options.arg_spec.as_ref().map(|s| &s[..]);
-    let spec = try!(ops::pkgid(&root, spec, config));
+    let spec = try!(ops::pkgid(&ws, spec));
     println!("{}", spec);
     Ok(None)
 }
index 32b7f5e400c1ae670d1793b40d05b14091cee05e..1300668688368f8c864965b4b68a5ef0ee75b81a 100644 (file)
@@ -1,3 +1,4 @@
+use cargo::core::Workspace;
 use cargo::ops;
 use cargo::util::{CliResult, Config};
 use cargo::util::important_paths::find_root_manifest_for_wd;
@@ -47,7 +48,8 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
     } = options;
 
     let root = try!(find_root_manifest_for_wd(flag_manifest_path.clone(), config.cwd()));
-    try!(ops::publish(&root, &ops::PublishOpts {
+    let ws = try!(Workspace::new(&root, config));
+    try!(ops::publish(&ws, &ops::PublishOpts {
         config: config,
         token: token,
         index: host,
index 408f16ddca94ea8410867def61546c824d1822c2..378e3dc240037c5c0942f7edfe1590de1391ef9e 100644 (file)
@@ -1,3 +1,4 @@
+use cargo::core::Workspace;
 use cargo::ops;
 use cargo::util::{CliResult, CliError, Config, Human};
 use cargo::util::important_paths::{find_root_manifest_for_wd};
@@ -85,7 +86,8 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
         target_rustc_args: None,
     };
 
-    match try!(ops::run(&root, &compile_opts, &options.arg_args)) {
+    let ws = try!(Workspace::new(&root, config));
+    match try!(ops::run(&ws, &compile_opts, &options.arg_args)) {
         None => Ok(None),
         Some(err) => {
             // If we never actually spawned the process then that sounds pretty
index 4a821a8c0e7c347cc65df5492ac9d9f148a62359..cc029c0a4898745b75ac8571b6efa2548d221571 100644 (file)
@@ -1,5 +1,6 @@
 use std::env;
 
+use cargo::core::Workspace;
 use cargo::ops::{CompileOptions, CompileMode};
 use cargo::ops;
 use cargo::util::important_paths::{find_root_manifest_for_wd};
@@ -104,7 +105,8 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
         target_rustc_args: options.arg_opts.as_ref().map(|a| &a[..]),
     };
 
-    try!(ops::compile(&root, &opts));
+    let ws = try!(Workspace::new(&root, config));
+    try!(ops::compile(&ws, &opts));
     Ok(None)
 }
 
index ecd33ecb2b93ac91c0b703934837527825b6dd09..c5710a184c3b0f4ecafe11f5ae89724768546812 100644 (file)
@@ -1,3 +1,4 @@
+use cargo::core::Workspace;
 use cargo::ops;
 use cargo::util::{CliResult, Config};
 use cargo::util::important_paths::{find_root_manifest_for_wd};
@@ -91,7 +92,8 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
         },
     };
 
-    try!(ops::doc(&root, &doc_opts));
+    let ws = try!(Workspace::new(&root, config));
+    try!(ops::doc(&ws, &doc_opts));
 
     Ok(None)
 }
index 9e017117eeb72be7b0ff8cf6ca58d4ab51e48362..f7f53bc4bfdb4efb8f38eadff5419a0be91105a4 100644 (file)
@@ -1,3 +1,4 @@
+use cargo::core::Workspace;
 use cargo::ops;
 use cargo::util::{CliResult, CliError, Human, human, Config};
 use cargo::util::important_paths::{find_root_manifest_for_wd};
@@ -119,7 +120,8 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
         },
     };
 
-    let err = try!(ops::run_tests(&root, &ops, &options.arg_args));
+    let ws = try!(Workspace::new(&root, config));
+    let err = try!(ops::run_tests(&ws, &ops, &options.arg_args));
     match err {
         None => Ok(None),
         Some(err) => {
index 12d26883888912c433227dfb0be8dfc4dc98f8a1..8c72dd1118dc6809a2528f84c698a1ce541cd8d3 100644 (file)
@@ -1,5 +1,6 @@
 use std::env;
 
+use cargo::core::Workspace;
 use cargo::ops;
 use cargo::util::{CliResult, Config};
 use cargo::util::important_paths::find_root_manifest_for_wd;
@@ -66,6 +67,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> {
         config: config,
     };
 
-    try!(ops::update_lockfile(&root, &update_opts));
+    let ws = try!(Workspace::new(&root, config));
+    try!(ops::update_lockfile(&ws, &update_opts));
     Ok(None)
 }
index d7133878d5853255e3ef5a3c8d7615006bc7c126..a4b401df0315a31d8bc441a88228ee0350a816be 100644 (file)
@@ -4,9 +4,14 @@ use std::path::{PathBuf, Path};
 use semver::Version;
 use rustc_serialize::{Encoder, Encodable};
 
-use core::{Dependency, PackageId, PackageIdSpec, Summary};
+use core::{Dependency, PackageId, PackageIdSpec, Summary, WorkspaceConfig};
 use core::package_id::Metadata;
 
+pub enum EitherManifest {
+    Real(Manifest),
+    Virtual(VirtualManifest),
+}
+
 /// Contains all the information about a package, as loaded from a Cargo.toml.
 #[derive(Clone, Debug)]
 pub struct Manifest {
@@ -20,6 +25,13 @@ pub struct Manifest {
     profiles: Profiles,
     publish: bool,
     replace: Vec<(PackageIdSpec, Dependency)>,
+    workspace: WorkspaceConfig,
+}
+
+#[derive(Clone, Debug)]
+pub struct VirtualManifest {
+    replace: Vec<(PackageIdSpec, Dependency)>,
+    workspace: WorkspaceConfig,
 }
 
 /// General metadata about a package which is just blindly uploaded to the
@@ -175,7 +187,8 @@ impl Manifest {
                metadata: ManifestMetadata,
                profiles: Profiles,
                publish: bool,
-               replace: Vec<(PackageIdSpec, Dependency)>) -> Manifest {
+               replace: Vec<(PackageIdSpec, Dependency)>,
+               workspace: WorkspaceConfig) -> Manifest {
         Manifest {
             summary: summary,
             targets: targets,
@@ -187,6 +200,7 @@ impl Manifest {
             profiles: profiles,
             publish: publish,
             replace: replace,
+            workspace: workspace,
         }
     }
 
@@ -207,6 +221,10 @@ impl Manifest {
         self.links.as_ref().map(|s| &s[..])
     }
 
+    pub fn workspace_config(&self) -> &WorkspaceConfig {
+        &self.workspace
+    }
+
     pub fn add_warning(&mut self, s: String) {
         self.warnings.push(s)
     }
@@ -216,6 +234,24 @@ impl Manifest {
     }
 }
 
+impl VirtualManifest {
+    pub fn new(replace: Vec<(PackageIdSpec, Dependency)>,
+               workspace: WorkspaceConfig) -> VirtualManifest {
+        VirtualManifest {
+            replace: replace,
+            workspace: workspace,
+        }
+    }
+
+    pub fn replace(&self) -> &[(PackageIdSpec, Dependency)] {
+        &self.replace
+    }
+
+    pub fn workspace_config(&self) -> &WorkspaceConfig {
+        &self.workspace
+    }
+}
+
 impl Target {
     fn blank() -> Target {
         Target {
index aa2c75d1a6c3c38097e021906c0314dda6c1c1a7..305b6ed7c7404ddb4f93eec1c1a938bc4539033e 100644 (file)
@@ -1,5 +1,6 @@
 pub use self::dependency::{Dependency, DependencyInner};
 pub use self::manifest::{Manifest, Target, TargetKind, Profile, LibKind, Profiles};
+pub use self::manifest::{EitherManifest, VirtualManifest};
 pub use self::package::{Package, PackageSet};
 pub use self::package_id::{PackageId, Metadata};
 pub use self::package_id_spec::PackageIdSpec;
@@ -8,6 +9,7 @@ pub use self::resolver::Resolve;
 pub use self::shell::{Shell, MultiShell, ShellConfig, Verbosity, ColorConfig};
 pub use self::source::{Source, SourceId, SourceMap, GitReference};
 pub use self::summary::Summary;
+pub use self::workspace::{Workspace, WorkspaceConfig};
 
 pub mod source;
 pub mod package;
@@ -19,3 +21,4 @@ pub mod summary;
 pub mod shell;
 pub mod registry;
 mod package_id_spec;
+mod workspace;
index 6b3e3926fe5a9989a7ba53b08c37f78afcdb736b..fb786bc184597b3dd0ec59abb5c7b4625ab53e6d 100644 (file)
@@ -3,7 +3,7 @@ use std::collections::{HashMap, BTreeMap};
 use regex::Regex;
 use rustc_serialize::{Encodable, Encoder, Decodable, Decoder};
 
-use core::{Package, PackageId, SourceId};
+use core::{Package, PackageId, SourceId, Workspace};
 use util::{CargoResult, Graph, Config};
 
 use super::Resolve;
@@ -18,11 +18,9 @@ pub struct EncodableResolve {
 pub type Metadata = BTreeMap<String, String>;
 
 impl EncodableResolve {
-    pub fn to_resolve(&self, root: &Package, config: &Config)
-                      -> CargoResult<Resolve> {
-        let mut path_deps = HashMap::new();
-        try!(build_path_deps(root, &mut path_deps, config));
-        let default = root.package_id().source_id();
+    pub fn to_resolve(&self, ws: &Workspace) -> CargoResult<Resolve> {
+        let path_deps = build_path_deps(ws);
+        let default = try!(ws.current()).package_id().source_id();
 
         let mut g = Graph::new();
         let mut tmp = HashMap::new();
@@ -103,33 +101,44 @@ impl EncodableResolve {
     }
 }
 
-fn build_path_deps(root: &Package,
-                   map: &mut HashMap<String, SourceId>,
-                   config: &Config)
-                   -> CargoResult<()> {
-    // If the root crate is *not* a path source, then we're probably in a
-    // situation such as `cargo install` with a lock file from a remote
-    // dependency. In that case we don't need to fixup any path dependencies (as
-    // they're not actually path dependencies any more), so we ignore them.
-    if !root.package_id().source_id().is_path() {
-        return Ok(())
+fn build_path_deps(ws: &Workspace) -> HashMap<String, SourceId> {
+    // If a crate is *not* a path source, then we're probably in a situation
+    // such as `cargo install` with a lock file from a remote dependency. In
+    // that case we don't need to fixup any path dependencies (as they're not
+    // actually path dependencies any more), so we ignore them.
+    let members = ws.members().filter(|p| {
+        p.package_id().source_id().is_path()
+    }).collect::<Vec<_>>();
+
+    let mut ret = HashMap::new();
+    for member in members.iter() {
+        ret.insert(member.package_id().name().to_string(),
+                   member.package_id().source_id().clone());
+    }
+    for member in members.iter() {
+        build(member, ws.config(), &mut ret);
     }
 
-    let deps = root.dependencies()
-                   .iter()
-                   .map(|d| d.source_id())
-                   .filter(|id| id.is_path())
-                   .filter_map(|id| id.url().to_file_path().ok())
-                   .map(|path| path.join("Cargo.toml"))
-                   .filter_map(|path| Package::for_path(&path, config).ok());
-    for pkg in deps {
-        let source_id = pkg.package_id().source_id();
-        if map.insert(pkg.name().to_string(), source_id.clone()).is_none() {
-            try!(build_path_deps(&pkg, map, config));
+    return ret;
+
+    fn build(pkg: &Package,
+             config: &Config,
+             ret: &mut HashMap<String, SourceId>) {
+        let deps = pkg.dependencies()
+                      .iter()
+                      .filter(|d| !ret.contains_key(d.name()))
+                      .map(|d| d.source_id())
+                      .filter(|id| id.is_path())
+                      .filter_map(|id| id.url().to_file_path().ok())
+                      .map(|path| path.join("Cargo.toml"))
+                      .filter_map(|path| Package::for_path(&path, config).ok())
+                      .collect::<Vec<_>>();
+        for pkg in deps {
+            ret.insert(pkg.name().to_string(),
+                       pkg.package_id().source_id().clone());
+            build(&pkg, config, ret);
         }
     }
-
-    Ok(())
 }
 
 fn to_package_id(name: &str,
index be690ba43056c15cac953ce777f6e30a27630360..2534e615bd677b9c7acf5040fb9fc57382161609 100644 (file)
@@ -199,20 +199,17 @@ struct Context<'a> {
 }
 
 /// Builds the list of all packages required to build the first argument.
-pub fn resolve(summary: &Summary,
-               method: &Method,
+pub fn resolve(root: &PackageId,
+               summaries: &[(Summary, Method)],
                replacements: &[(PackageIdSpec, Dependency)],
                registry: &mut Registry) -> CargoResult<Resolve> {
-    trace!("resolve; summary={}", summary.package_id());
-    let summary = Rc::new(summary.clone());
-
     let cx = Context {
-        resolve: Resolve::new(summary.package_id().clone()),
+        resolve: Resolve::new(root.clone()),
         activations: HashMap::new(),
         replacements: replacements,
     };
-    let _p = profile::start(format!("resolving: {}", summary.package_id()));
-    let cx = try!(activate_deps_loop(cx, registry, summary, method));
+    let _p = profile::start(format!("resolving: {}", root));
+    let cx = try!(activate_deps_loop(cx, registry, summaries));
     try!(check_cycles(&cx));
     Ok(cx.resolve)
 }
@@ -357,8 +354,8 @@ struct BacktrackFrame<'a> {
 /// dependency graph, cx.resolve is returned.
 fn activate_deps_loop<'a>(mut cx: Context<'a>,
                           registry: &mut Registry,
-                          top: Rc<Summary>,
-                          top_method: &Method) -> CargoResult<Context<'a>> {
+                          summaries: &[(Summary, Method)])
+                          -> CargoResult<Context<'a>> {
     // Note that a `BinaryHeap` is used for the remaining dependencies that need
     // activation. This heap is sorted such that the "largest value" is the most
     // constrained dependency, or the one with the least candidates.
@@ -368,9 +365,13 @@ fn activate_deps_loop<'a>(mut cx: Context<'a>,
     // use (those with more candidates).
     let mut backtrack_stack = Vec::new();
     let mut remaining_deps = BinaryHeap::new();
-    remaining_deps.extend(try!(activate(&mut cx, registry, None,
-                                        Candidate { summary: top, replace: None },
-                                        &top_method)));
+    for &(ref summary, ref method) in summaries {
+        debug!("initial activation: {}", summary.package_id());
+        let summary = Rc::new(summary.clone());
+        let candidate = Candidate { summary: summary, replace: None };
+        remaining_deps.extend(try!(activate(&mut cx, registry, None, candidate,
+                                            method)));
+    }
 
     // Main resolution loop, this is the workhorse of the resolution algorithm.
     //
diff --git a/src/cargo/core/workspace.rs b/src/cargo/core/workspace.rs
new file mode 100644 (file)
index 0000000..074c137
--- /dev/null
@@ -0,0 +1,467 @@
+use std::collections::hash_map::{HashMap, Entry};
+use std::collections::BTreeMap;
+use std::path::{Path, PathBuf};
+use std::slice;
+
+use core::{Package, VirtualManifest, EitherManifest, SourceId};
+use core::{PackageIdSpec, Dependency};
+use ops;
+use util::{Config, CargoResult};
+use util::paths;
+
+/// The core abstraction in Cargo for working with a workspace of crates.
+///
+/// A workspace is often created very early on and then threaded through all
+/// other functions. It's typically through this object that the current
+/// package is loaded and/or learned about.
+pub struct Workspace<'cfg> {
+    config: &'cfg Config,
+
+    // This path is a path to where the current cargo subcommand was invoked
+    // from. That is, this is the `--manifest-path` argument to Cargo, and
+    // points to the "main crate" that we're going to worry about.
+    current_manifest: PathBuf,
+
+    // A list of packages found in this workspace. Always includes at least the
+    // package mentioned by `current_manifest`.
+    packages: Packages<'cfg>,
+
+    // If this workspace includes more than one crate, this points to the root
+    // of the workspace. This is `None` in the case that `[workspace]` is
+    // missing, `package.workspace` is missing, and no `Cargo.toml` above
+    // `current_manifest` was found on the filesystem with `[workspace]`.
+    root_manifest: Option<PathBuf>,
+
+    // List of members in this workspace with a listing of all their manifest
+    // paths. The packages themselves can be looked up through the `packages`
+    // set above.
+    members: Vec<PathBuf>,
+}
+
+// Separate structure for tracking loaded packages (to avoid loading anything
+// twice), and this is separate to help appease the borrow checker.
+struct Packages<'cfg> {
+    config: &'cfg Config,
+    packages: HashMap<PathBuf, MaybePackage>,
+}
+
+enum MaybePackage {
+    Package(Package),
+    Virtual(VirtualManifest),
+}
+
+/// Configuration of a workspace in a manifest.
+#[derive(Debug, Clone)]
+pub enum WorkspaceConfig {
+    /// Indicates that `[workspace]` was present and the members were
+    /// optionally specified as well.
+    Root { members: Option<Vec<String>> },
+
+    /// Indicates that `[workspace]` was present and the `root` field is the
+    /// optional value of `package.workspace`, if present.
+    Member { root: Option<String> },
+}
+
+/// An iterator over the member packages of a workspace, returned by
+/// `Workspace::members`
+pub struct Members<'a, 'cfg: 'a> {
+    ws: &'a Workspace<'cfg>,
+    iter: slice::Iter<'a, PathBuf>,
+}
+
+impl<'cfg> Workspace<'cfg> {
+    /// Creates a new workspace given the target manifest pointed to by
+    /// `manifest_path`.
+    ///
+    /// This function will construct the entire workspace by determining the
+    /// root and all member packages. It will then validate the workspace
+    /// before returning it, so `Ok` is only returned for valid workspaces.
+    pub fn new(manifest_path: &Path, config: &'cfg Config)
+               -> CargoResult<Workspace<'cfg>> {
+        let mut ws = Workspace {
+            config: config,
+            current_manifest: manifest_path.to_path_buf(),
+            packages: Packages {
+                config: config,
+                packages: HashMap::new(),
+            },
+            root_manifest: None,
+            members: Vec::new(),
+        };
+        ws.root_manifest = try!(ws.find_root(manifest_path));
+        try!(ws.find_members());
+        try!(ws.validate());
+        Ok(ws)
+    }
+
+    /// Creates a "temporary workspace" from one package which only contains
+    /// that package.
+    ///
+    /// This constructor will not touch the filesystem and only creates an
+    /// in-memory workspace. That is, all configuration is ignored, it's just
+    /// intended for that one package.
+    ///
+    /// This is currently only used in niche situations like `cargo install` or
+    /// `cargo package`.
+    pub fn one(package: Package, config: &'cfg Config) -> Workspace<'cfg> {
+        let mut ws = Workspace {
+            config: config,
+            current_manifest: package.manifest_path().to_path_buf(),
+            packages: Packages {
+                config: config,
+                packages: HashMap::new(),
+            },
+            root_manifest: None,
+            members: Vec::new(),
+        };
+        {
+            let key = ws.current_manifest.parent().unwrap();
+            let package = MaybePackage::Package(package);
+            ws.packages.packages.insert(key.to_path_buf(), package);
+            ws.members.push(ws.current_manifest.clone());
+        }
+        return ws
+    }
+
+    /// Returns the current package of this workspace.
+    ///
+    /// Note that this can return an error if the current manifest is
+    /// actually a "virtual Cargo.toml", in which case an error is returned
+    /// indicating that something else should be passed.
+    pub fn current(&self) -> CargoResult<&Package> {
+        match *self.packages.get(&self.current_manifest) {
+            MaybePackage::Package(ref p) => Ok(p),
+            MaybePackage::Virtual(..) => {
+                bail!("manifest path `{}` is a virtual manifest, but this \
+                       command requires running against an actual package in \
+                       this workspace", self.current_manifest.display())
+            }
+        }
+    }
+
+    /// Returns the `Config` this workspace is associated with.
+    pub fn config(&self) -> &'cfg Config {
+        self.config
+    }
+
+    /// Returns the root path of this workspace.
+    ///
+    /// That is, this returns the path of the directory containing the
+    /// `Cargo.toml` which is the root of this workspace.
+    pub fn root(&self) -> &Path {
+        match self.root_manifest {
+            Some(ref p) => p,
+            None => &self.current_manifest
+        }.parent().unwrap()
+    }
+
+    /// Returns the root [replace] section of this workspace.
+    ///
+    /// This may be from a virtual crate or an actual crate.
+    pub fn root_replace(&self) -> &[(PackageIdSpec, Dependency)] {
+        let path = match self.root_manifest {
+            Some(ref p) => p,
+            None => &self.current_manifest,
+        };
+        match *self.packages.get(path) {
+            MaybePackage::Package(ref p) => p.manifest().replace(),
+            MaybePackage::Virtual(ref v) => v.replace(),
+        }
+    }
+
+    /// Returns an iterator over all packages in this workspace
+    pub fn members<'a>(&'a self) -> Members<'a, 'cfg> {
+        Members {
+            ws: self,
+            iter: self.members.iter(),
+        }
+    }
+
+    /// Finds the root of a workspace for the crate whose manifest is located
+    /// at `manifest_path`.
+    ///
+    /// This will parse the `Cargo.toml` at `manifest_path` and then interpret
+    /// the workspace configuration, optionally walking up the filesystem
+    /// looking for other workspace roots.
+    ///
+    /// Returns an error if `manifest_path` isn't actually a valid manifest or
+    /// if some other transient error happens.
+    fn find_root(&mut self, manifest_path: &Path)
+                 -> CargoResult<Option<PathBuf>> {
+        {
+            let current = try!(self.packages.load(&manifest_path));
+            match *current.workspace_config() {
+                WorkspaceConfig::Root { .. } => {
+                    debug!("find_root - is root {}", manifest_path.display());
+                    return Ok(Some(manifest_path.to_path_buf()))
+                }
+                WorkspaceConfig::Member { root: Some(ref path_to_root) } => {
+                    let path = manifest_path.parent().unwrap()
+                                            .join(path_to_root)
+                                            .join("Cargo.toml");
+                    debug!("find_root - pointer {}", path.display());
+                    return Ok(Some(paths::normalize_path(&path)))
+                }
+                WorkspaceConfig::Member { root: None } => {}
+            }
+        }
+
+        let mut cur = manifest_path.parent().and_then(|p| p.parent());
+        while let Some(path) = cur {
+            let manifest = path.join("Cargo.toml");
+            debug!("find_root - trying {}", manifest.display());
+            if let Ok(pkg) = self.packages.load(&manifest) {
+                match *pkg.workspace_config() {
+                    WorkspaceConfig::Root { .. } => {
+                        debug!("find_root - found");
+                        return Ok(Some(manifest))
+                    }
+                    WorkspaceConfig::Member { .. } => {}
+                }
+            }
+            cur = path.parent();
+        }
+
+        Ok(None)
+    }
+
+    /// After the root of a workspace has been located, probes for all members
+    /// of a workspace.
+    ///
+    /// If the `workspace.members` configuration is present, then this just
+    /// verifies that those are all valid packages to point to. Otherwise, this
+    /// will transitively follow all `path` dependencies looking for members of
+    /// the workspace.
+    fn find_members(&mut self) -> CargoResult<()> {
+        let root_manifest = match self.root_manifest {
+            Some(ref path) => path.clone(),
+            None => {
+                // No workspace root was discovered, so the current crate is
+                // the one and only member.
+                debug!("find_members - only me as a member");
+                self.members.push(self.current_manifest.clone());
+                return Ok(())
+            }
+        };
+        // Load the root manifest to read its `workspace.members` list. If the
+        // manifest we previously decided was the root doesn't actually carry
+        // `[workspace]` configuration, that's an internal inconsistency.
+        let members = {
+            let root = try!(self.packages.load(&root_manifest));
+            match *root.workspace_config() {
+                WorkspaceConfig::Root { ref members } => members.clone(),
+                _ => bail!("root of a workspace inferred but wasn't a root: {}",
+                           root_manifest.display()),
+            }
+        };
+
+        // Explicitly listed members are paths relative to the root manifest's
+        // directory; each one seeds the recursive path-dependency walk below.
+        if let Some(list) = members {
+            let root = root_manifest.parent().unwrap();
+            for path in list {
+                let manifest_path = root.join(path).join("Cargo.toml");
+                try!(self.find_path_deps(&manifest_path));
+            }
+        }
+
+        // The root manifest itself (plus its path dependencies) is always a
+        // member as well.
+        self.find_path_deps(&root_manifest)
+    }
+
+    /// Records `manifest_path` as a workspace member and then transitively
+    /// records all of its `path` dependencies as members too.
+    fn find_path_deps(&mut self, manifest_path: &Path) -> CargoResult<()> {
+        // Already recorded; this check also terminates cycles among mutually
+        // referencing path dependencies.
+        if self.members.iter().any(|p| p == manifest_path) {
+            return Ok(())
+        }
+
+        debug!("find_members - {}", manifest_path.display());
+        self.members.push(manifest_path.to_path_buf());
+
+        let candidates = {
+            let pkg = match *try!(self.packages.load(manifest_path)) {
+                MaybePackage::Package(ref p) => p,
+                // A virtual manifest has no package (and no dependency list)
+                // to walk, so recursion stops here.
+                MaybePackage::Virtual(_) => return Ok(()),
+            };
+            // Collect the manifest paths of all `path` dependencies: each
+            // path-sourced dependency's directory should hold a `Cargo.toml`.
+            pkg.dependencies()
+               .iter()
+               .map(|d| d.source_id())
+               .filter(|d| d.is_path())
+               .filter_map(|d| d.url().to_file_path().ok())
+               .map(|p| p.join("Cargo.toml"))
+               .collect::<Vec<_>>()
+        };
+        for candidate in candidates {
+            try!(self.find_path_deps(&candidate));
+        }
+        Ok(())
+    }
+
+    /// Validates a workspace, ensuring that a number of invariants are upheld:
+    ///
+    /// 1. A workspace only has one root.
+    /// 2. All workspace members agree on this one root as the root.
+    /// 3. The current crate is a member of this workspace.
+    fn validate(&mut self) -> CargoResult<()> {
+        // Not part of any workspace at all: nothing to validate.
+        if self.root_manifest.is_none() {
+            return Ok(())
+        }
+
+        // First pass over members: record every member that is itself
+        // configured as a workspace root, and reject two members that share
+        // the same package name.
+        let mut roots = Vec::new();
+        {
+            let mut names = BTreeMap::new();
+            for member in self.members.iter() {
+                let package = self.packages.get(member);
+                match *package.workspace_config() {
+                    WorkspaceConfig::Root { .. } => {
+                        roots.push(member.parent().unwrap().to_path_buf());
+                    }
+                    WorkspaceConfig::Member { .. } => {}
+                }
+                // Virtual manifests carry no package name, so they can't
+                // participate in a name collision.
+                let name = match *package {
+                    MaybePackage::Package(ref p) => p.name(),
+                    MaybePackage::Virtual(_) => continue,
+                };
+                if let Some(prev) = names.insert(name, member) {
+                    bail!("two packages named `{}` in this workspace:\n\
+                           - {}\n\
+                           - {}", name, prev.display(), member.display());
+                }
+            }
+        }
+
+        // Invariant 1: exactly one member is configured as the root.
+        match roots.len() {
+            0 => {
+                bail!("`package.workspace` configuration points to a crate \
+                       which is not configured with [workspace]: \n\
+                       configuration at: {}\n\
+                       points to: {}",
+                      self.current_manifest.display(),
+                      self.root_manifest.as_ref().unwrap().display())
+            }
+            1 => {}
+            _ => {
+                bail!("multiple workspace roots found in the same workspace:\n{}",
+                      roots.iter()
+                           .map(|r| format!("  {}", r.display()))
+                           .collect::<Vec<_>>()
+                           .join("\n"));
+            }
+        }
+
+        // Invariant 2: every member, when asked independently via
+        // `find_root`, must discover the same root that we did.
+        for member in self.members.clone() {
+            let root = try!(self.find_root(&member));
+            if root == self.root_manifest {
+                continue
+            }
+
+            match root {
+                Some(root) => {
+                    bail!("package `{}` is a member of the wrong workspace\n\
+                           expected: {}\n\
+                           actual:   {}",
+                          member.display(),
+                          self.root_manifest.as_ref().unwrap().display(),
+                          root.display());
+                }
+                None => {
+                    bail!("workspace member `{}` is not hierarchically below \
+                           the workspace root `{}`",
+                          member.display(),
+                          self.root_manifest.as_ref().unwrap().display());
+                }
+            }
+        }
+
+        // Invariant 3: the current crate must itself be among the members of
+        // the workspace it points at. If not, emit an error carrying the most
+        // actionable suggestion we can compute.
+        if !self.members.contains(&self.current_manifest) {
+            let root = self.root_manifest.as_ref().unwrap();
+            let root_dir = root.parent().unwrap();
+            let current_dir = self.current_manifest.parent().unwrap();
+            let root_pkg = self.packages.get(root);
+
+            // Suggest adding this crate to `workspace.members`, using a
+            // relative path when the crate lives under the root directory.
+            let members_msg = match current_dir.strip_prefix(root_dir) {
+                Ok(rel) => {
+                    format!("this may be fixable by adding `{}` to the \
+                             `workspace.members` array of the manifest \
+                             located at: {}",
+                             rel.display(),
+                             root.display())
+                }
+                Err(_) => {
+                    format!("this may be fixable by adding a member to \
+                             the `workspace.members` array of the \
+                             manifest located at: {}", root.display())
+                }
+            };
+            // A real (non-virtual) root with no explicit `workspace.members`
+            // list infers members from path dependencies, so in that case
+            // suggest adding a dependency edge instead.
+            let extra = match *root_pkg {
+                MaybePackage::Virtual(_) => members_msg,
+                MaybePackage::Package(ref p) => {
+                    let members = match *p.manifest().workspace_config() {
+                        WorkspaceConfig::Root { ref members } => members,
+                        WorkspaceConfig::Member { .. } => unreachable!(),
+                    };
+                    if members.is_none() {
+                        format!("this may be fixable by ensuring that this \
+                                 crate is depended on by the workspace \
+                                 root: {}", root.display())
+                    } else {
+                        members_msg
+                    }
+                }
+            };
+            bail!("current package believes it's in a workspace when it's not:\n\
+                   current:   {}\n\
+                   workspace: {}\n\n{}",
+                  self.current_manifest.display(),
+                  root.display(),
+                  extra);
+        }
+
+        Ok(())
+    }
+}
+
+impl<'cfg> Packages<'cfg> {
+    /// Returns the previously-loaded package for `manifest_path`.
+    ///
+    /// Lookups are keyed by the manifest's parent directory; this indexes the
+    /// map directly and so panics if `load` was never called for this path.
+    fn get(&self, manifest_path: &Path) -> &MaybePackage {
+        &self.packages[manifest_path.parent().unwrap()]
+    }
+
+    /// Reads the manifest at `manifest_path` and caches the result, keyed by
+    /// the manifest's parent directory so each directory is parsed only once.
+    /// Real manifests become `MaybePackage::Package`; `[workspace]`-only
+    /// manifests become `MaybePackage::Virtual`.
+    fn load(&mut self, manifest_path: &Path) -> CargoResult<&MaybePackage> {
+        let key = manifest_path.parent().unwrap();
+        match self.packages.entry(key.to_path_buf()) {
+            Entry::Occupied(e) => Ok(e.into_mut()),
+            Entry::Vacant(v) => {
+                let source_id = try!(SourceId::for_path(key));
+                let pair = try!(ops::read_manifest(&manifest_path, &source_id,
+                                                   self.config));
+                // Nested paths aren't needed for workspace discovery.
+                let (manifest, _nested_paths) = pair;
+                Ok(v.insert(match manifest {
+                    EitherManifest::Real(manifest) => {
+                        MaybePackage::Package(Package::new(manifest,
+                                                           manifest_path))
+                    }
+                    EitherManifest::Virtual(v) => {
+                        MaybePackage::Virtual(v)
+                    }
+                }))
+            }
+        }
+    }
+}
+
+impl<'a, 'cfg> Iterator for Members<'a, 'cfg> {
+    type Item = &'a Package;
+
+    /// Yields the next *real* package among the workspace members, silently
+    /// skipping virtual manifests (which carry no package to yield).
+    fn next(&mut self) -> Option<&'a Package> {
+        loop {
+            let next = self.iter.next().map(|path| {
+                self.ws.packages.get(path)
+            });
+            match next {
+                Some(&MaybePackage::Package(ref p)) => return Some(p),
+                // Virtual manifest: keep scanning for the next real package.
+                Some(&MaybePackage::Virtual(_)) => {}
+                None => return None,
+            }
+        }
+    }
+}
+
+impl MaybePackage {
+    /// Returns the `[workspace]` configuration, whether this is a real
+    /// package (stored on its manifest) or a virtual manifest.
+    fn workspace_config(&self) -> &WorkspaceConfig {
+        match *self {
+            MaybePackage::Virtual(ref v) => v.workspace_config(),
+            MaybePackage::Package(ref v) => v.manifest().workspace_config(),
+        }
+    }
+}
index 1c70176af1d11c4bbd1307360ce241aaab740cb9..8306859ef5c18ef7bed4ce9afc40f8c5d421163d 100644 (file)
@@ -2,7 +2,7 @@ use std::default::Default;
 use std::fs;
 use std::path::Path;
 
-use core::{Package, Profiles};
+use core::{Profiles, Workspace};
 use core::registry::PackageRegistry;
 use util::{CargoResult, human, ChainError, Config};
 use ops::{self, Layout, Context, BuildConfig, Kind, Unit};
@@ -15,9 +15,8 @@ pub struct CleanOptions<'a> {
 }
 
 /// Cleans the project from build artifacts.
-pub fn clean(manifest_path: &Path, opts: &CleanOptions) -> CargoResult<()> {
-    let root = try!(Package::for_path(manifest_path, opts.config));
-    let target_dir = opts.config.target_dir(&root);
+pub fn clean(ws: &Workspace, opts: &CleanOptions) -> CargoResult<()> {
+    let target_dir = opts.config.target_dir(&ws);
 
     // If we have a spec, then we need to delete some packages, otherwise, just
     // remove the whole target directory and be done with it!
@@ -30,22 +29,21 @@ pub fn clean(manifest_path: &Path, opts: &CleanOptions) -> CargoResult<()> {
     }
 
     let mut registry = PackageRegistry::new(opts.config);
-    let resolve = try!(ops::resolve_pkg(&mut registry, &root, opts.config));
+    let resolve = try!(ops::resolve_ws(&mut registry, ws));
     let packages = ops::get_resolved_packages(&resolve, registry);
 
     let dest = if opts.release {"release"} else {"debug"};
-    let host_layout = try!(Layout::new(opts.config, &root, None, dest));
+    let host_layout = try!(Layout::new(ws, None, dest));
     let target_layout = match opts.target {
-        Some(target) => {
-            Some(try!(Layout::new(opts.config, &root, Some(target), dest)))
-        }
+        Some(target) => Some(try!(Layout::new(ws, Some(target), dest))),
         None => None,
     };
 
+    let profiles = try!(ws.current()).manifest().profiles();
     let mut cx = try!(Context::new(&resolve, &packages, opts.config,
                                    host_layout, target_layout,
                                    BuildConfig::default(),
-                                   root.manifest().profiles()));
+                                   profiles));
     let mut units = Vec::new();
 
     for spec in opts.spec {
@@ -59,7 +57,7 @@ pub fn clean(manifest_path: &Path, opts: &CleanOptions) -> CargoResult<()> {
                 let Profiles {
                     ref release, ref dev, ref test, ref bench, ref doc,
                     ref custom_build, ref test_deps, ref bench_deps,
-                } = *root.manifest().profiles();
+                } = *profiles;
                 let profiles = [release, dev, test, bench, doc, custom_build,
                                 test_deps, bench_deps];
                 for profile in profiles.iter() {
index 30854a10b4b243db4aa4fbc4f472f5a73a4d4fe1..8f175324aabe43038be858d316c0c6467a314fc9 100644 (file)
 //!
 
 use std::collections::HashMap;
-use std::default::Default;
-use std::path::{Path, PathBuf};
+use std::path::PathBuf;
 use std::sync::Arc;
 
 use core::registry::PackageRegistry;
 use core::{Source, SourceId, PackageSet, Package, Target};
-use core::{Profile, TargetKind, Profiles};
+use core::{Profile, TargetKind, Profiles, Workspace};
 use core::resolver::{Method, Resolve};
 use ops::{self, BuildOutput, ExecEngine};
 use sources::PathSource;
@@ -84,43 +83,37 @@ pub enum CompileFilter<'a> {
     }
 }
 
-pub fn compile<'a>(manifest_path: &Path,
-                   options: &CompileOptions<'a>)
+pub fn compile<'a>(ws: &Workspace<'a>, options: &CompileOptions<'a>)
                    -> CargoResult<ops::Compilation<'a>> {
-    debug!("compile; manifest-path={}", manifest_path.display());
-
-    let package = try!(Package::for_path(manifest_path, options.config));
-    debug!("loaded package; package={}", package);
-
-    for key in package.manifest().warnings().iter() {
+    for key in try!(ws.current()).manifest().warnings().iter() {
         try!(options.config.shell().warn(key))
     }
-    compile_pkg(&package, None, options)
+    compile_ws(ws, None, options)
 }
 
-pub fn resolve_dependencies<'a>(root_package: &Package,
-                                config: &'a Config,
+pub fn resolve_dependencies<'a>(ws: &Workspace<'a>,
                                 source: Option<Box<Source + 'a>>,
                                 features: Vec<String>,
                                 no_default_features: bool)
                                 -> CargoResult<(PackageSet<'a>, Resolve)> {
 
-    let mut registry = PackageRegistry::new(config);
+    let mut registry = PackageRegistry::new(ws.config());
 
     if let Some(source) = source {
-        registry.add_preloaded(root_package.package_id().source_id(), source);
+        registry.add_preloaded(try!(ws.current()).package_id().source_id(),
+                               source);
     }
 
     // First, resolve the root_package's *listed* dependencies, as well as
     // downloading and updating all remotes and such.
-    let resolve = try!(ops::resolve_pkg(&mut registry, root_package, config));
+    let resolve = try!(ops::resolve_ws(&mut registry, ws));
 
     // Second, resolve with precisely what we're doing. Filter out
     // transitive dependencies if necessary, specify features, handle
     // overrides, etc.
     let _p = profile::start("resolving w/ overrides...");
 
-    try!(add_overrides(&mut registry, root_package.root(), config));
+    try!(add_overrides(&mut registry, ws));
 
     let method = Method::Required{
         dev_deps: true, // TODO: remove this option?
@@ -129,7 +122,7 @@ pub fn resolve_dependencies<'a>(root_package: &Package,
     };
 
     let resolved_with_overrides =
-            try!(ops::resolve_with_previous(&mut registry, root_package,
+            try!(ops::resolve_with_previous(&mut registry, ws,
                                             method, Some(&resolve), None));
 
     let packages = ops::get_resolved_packages(&resolved_with_overrides,
@@ -138,10 +131,11 @@ pub fn resolve_dependencies<'a>(root_package: &Package,
     Ok((packages, resolved_with_overrides))
 }
 
-pub fn compile_pkg<'a>(root_package: &Package,
-                       source: Option<Box<Source + 'a>>,
-                       options: &CompileOptions<'a>)
-                       -> CargoResult<ops::Compilation<'a>> {
+pub fn compile_ws<'a>(ws: &Workspace<'a>,
+                      source: Option<Box<Source + 'a>>,
+                      options: &CompileOptions<'a>)
+                      -> CargoResult<ops::Compilation<'a>> {
+    let root_package = try!(ws.current());
     let CompileOptions { config, jobs, target, spec, features,
                          no_default_features, release, mode,
                          ref filter, ref exec_engine,
@@ -163,8 +157,7 @@ pub fn compile_pkg<'a>(root_package: &Package,
     }
 
     let (packages, resolve_with_overrides) = {
-        try!(resolve_dependencies(root_package, config, source, features,
-                                  no_default_features))
+        try!(resolve_dependencies(ws, source, features, no_default_features))
     };
 
     let mut pkgids = Vec::new();
@@ -241,13 +234,13 @@ pub fn compile_pkg<'a>(root_package: &Package,
             build_config.doc_all = deps;
         }
 
-        try!(ops::compile_targets(&package_targets,
+        try!(ops::compile_targets(ws,
+                                  &package_targets,
                                   &packages,
                                   &resolve_with_overrides,
                                   config,
                                   build_config,
-                                  root_package.manifest().profiles(),
-                                  ))
+                                  profiles))
     };
 
     ret.to_doc_test = to_builds.iter().map(|&p| p.clone()).collect();
@@ -390,12 +383,12 @@ fn generate_targets<'a>(pkg: &'a Package,
 /// Read the `paths` configuration variable to discover all path overrides that
 /// have been configured.
 fn add_overrides<'a>(registry: &mut PackageRegistry<'a>,
-                     cur_path: &Path,
-                     config: &'a Config) -> CargoResult<()> {
-    let paths = match try!(config.get_list("paths")) {
+                     ws: &Workspace<'a>) -> CargoResult<()> {
+    let paths = match try!(ws.config().get_list("paths")) {
         Some(list) => list,
         None => return Ok(())
     };
+    let current = try!(ws.current());
     let paths = paths.val.iter().map(|&(ref s, ref p)| {
         // The path listed next to the string is the config file in which the
         // key was located, so we want to pop off the `.cargo/config` component
@@ -404,12 +397,12 @@ fn add_overrides<'a>(registry: &mut PackageRegistry<'a>,
     }).filter(|&(ref p, _)| {
         // Make sure we don't override the local package, even if it's in the
         // list of override paths.
-        cur_path != &**p
+        current.root() != &**p
     });
 
     for (path, definition) in paths {
         let id = try!(SourceId::for_path(&path));
-        let mut source = PathSource::new_recursive(&path, &id, config);
+        let mut source = PathSource::new_recursive(&path, &id, ws.config());
         try!(source.update().chain_error(|| {
             human(format!("failed to update path override `{}` \
                            (defined in `{}`)", path.display(),
index 43a6621d750fac7e722a620e5bc8f122525c9454..c8a8b4719a2e4a9a9fa5ed456fee483123eeaa35 100644 (file)
@@ -3,7 +3,7 @@ use std::fs;
 use std::path::Path;
 use std::process::Command;
 
-use core::{Package, PackageIdSpec};
+use core::{PackageIdSpec, Workspace};
 use ops;
 use util::CargoResult;
 
@@ -12,9 +12,9 @@ pub struct DocOptions<'a> {
     pub compile_opts: ops::CompileOptions<'a>,
 }
 
-pub fn doc(manifest_path: &Path,
+pub fn doc(ws: &Workspace,
            options: &DocOptions) -> CargoResult<()> {
-    let package = try!(Package::for_path(manifest_path, options.compile_opts.config));
+    let package = try!(ws.current());
 
     let mut lib_names = HashSet::new();
     let mut bin_names = HashSet::new();
@@ -35,7 +35,7 @@ pub fn doc(manifest_path: &Path,
         }
     }
 
-    try!(ops::compile(manifest_path, &options.compile_opts));
+    try!(ops::compile(ws, &options.compile_opts));
 
     if options.open_result {
         let name = if options.compile_opts.spec.len() > 1 {
@@ -53,7 +53,7 @@ pub fn doc(manifest_path: &Path,
         // Don't bother locking here as if this is getting deleted there's
         // nothing we can do about it and otherwise if it's getting overwritten
         // then that's also ok!
-        let target_dir = options.compile_opts.config.target_dir(&package);
+        let target_dir = options.compile_opts.config.target_dir(ws);
         let path = target_dir.join("doc").join(&name).join("index.html");
         let path = path.into_path_unlocked();
         if fs::metadata(&path).is_ok() {
index e8f401d5333b835c3519aae564eb52608df2537c..03b19ca5d19c5e8e59a686746f2a90187a9ca3fd 100644 (file)
@@ -1,17 +1,12 @@
-use std::path::Path;
-
 use core::registry::PackageRegistry;
-use core::{Package, PackageId, Resolve, PackageSet};
+use core::{PackageId, Resolve, PackageSet, Workspace};
 use ops;
-use util::{CargoResult, Config};
+use util::CargoResult;
 
 /// Executes `cargo fetch`.
-pub fn fetch<'a>(manifest_path: &Path,
-                 config: &'a Config)
-                 -> CargoResult<(Resolve, PackageSet<'a>)> {
-    let package = try!(Package::for_path(manifest_path, config));
-    let mut registry = PackageRegistry::new(config);
-    let resolve = try!(ops::resolve_pkg(&mut registry, &package, config));
+pub fn fetch<'a>(ws: &Workspace<'a>) -> CargoResult<(Resolve, PackageSet<'a>)> {
+    let mut registry = PackageRegistry::new(ws.config());
+    let resolve = try!(ops::resolve_ws(&mut registry, ws));
     let packages = get_resolved_packages(&resolve, registry);
     for id in resolve.iter() {
         try!(packages.get(id));
index 14c3322c5a8aeff700012619ef85519d019ce4f6..b196dec916bd7a983c42236950355e0875d05127 100644 (file)
@@ -1,9 +1,8 @@
 use std::collections::{BTreeMap, HashSet};
-use std::path::Path;
 
 use core::PackageId;
 use core::registry::PackageRegistry;
-use core::{Resolve, SourceId, Package};
+use core::{Resolve, SourceId, Workspace};
 use core::resolver::Method;
 use ops;
 use util::config::Config;
@@ -16,29 +15,25 @@ pub struct UpdateOptions<'a> {
     pub aggressive: bool,
 }
 
-pub fn generate_lockfile(manifest_path: &Path, config: &Config)
-                         -> CargoResult<()> {
-    let package = try!(Package::for_path(manifest_path, config));
-    let mut registry = PackageRegistry::new(config);
-    let resolve = try!(ops::resolve_with_previous(&mut registry, &package,
+pub fn generate_lockfile(ws: &Workspace) -> CargoResult<()> {
+    let mut registry = PackageRegistry::new(ws.config());
+    // Resolve from scratch: no previous lockfile to seed from and nothing to
+    // avoid, using `Method::Everything`.
+    let resolve = try!(ops::resolve_with_previous(&mut registry, ws,
                                                   Method::Everything,
                                                   None, None));
-    try!(ops::write_pkg_lockfile(&package, &resolve, config));
+    try!(ops::write_pkg_lockfile(ws, &resolve));
     Ok(())
 }
 
-pub fn update_lockfile(manifest_path: &Path,
-                       opts: &UpdateOptions) -> CargoResult<()> {
+pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions)
+                       -> CargoResult<()> {
 
     if opts.aggressive && opts.precise.is_some() {
         bail!("cannot specify both aggressive and precise simultaneously")
     }
 
-    let package = try!(Package::for_path(manifest_path, opts.config));
-
-    let previous_resolve = match try!(ops::load_pkg_lockfile(&package, opts.config)) {
+    let previous_resolve = match try!(ops::load_pkg_lockfile(ws)) {
         Some(resolve) => resolve,
-        None => return generate_lockfile(manifest_path, opts.config),
+        None => return generate_lockfile(ws),
     };
     let mut registry = PackageRegistry::new(opts.config);
     let mut to_avoid = HashSet::new();
@@ -76,7 +71,7 @@ pub fn update_lockfile(manifest_path: &Path,
     }
 
     let resolve = try!(ops::resolve_with_previous(&mut registry,
-                                                  &package,
+                                                  ws,
                                                   Method::Everything,
                                                   Some(&previous_resolve),
                                                   Some(&to_avoid)));
@@ -104,7 +99,7 @@ pub fn update_lockfile(manifest_path: &Path,
         }
     }
 
-    try!(ops::write_pkg_lockfile(&package, &resolve, opts.config));
+    try!(ops::write_pkg_lockfile(&ws, &resolve));
     return Ok(());
 
     fn fill_with_deps<'a>(resolve: &'a Resolve, dep: &'a PackageId,
index 6744e77bc8dfff7e3a733634c31397d02d54998a..071bb58c8c8f3fe6a7273858cd78fe8039bbdcde 100644 (file)
@@ -11,7 +11,7 @@ use tempdir::TempDir;
 use toml;
 
 use core::{SourceId, Source, Package, Dependency, PackageIdSpec};
-use core::PackageId;
+use core::{PackageId, Workspace};
 use ops::{self, CompileFilter};
 use sources::{GitSource, PathSource, RegistrySource};
 use util::{CargoResult, ChainError, Config, human, internal};
@@ -76,6 +76,8 @@ pub fn install(root: Option<&str>,
                                             crates.io, or use --path or --git to \
                                             specify alternate source"))))
     };
+    let ws = Workspace::one(pkg, config);
+    let pkg = try!(ws.current());
 
     // Preflight checks to check up front whether we'll overwrite something.
     // We have to check this again afterwards, but may as well avoid building
@@ -84,12 +86,12 @@ pub fn install(root: Option<&str>,
         let metadata = try!(metadata(config, &root));
         let list = try!(read_crate_list(metadata.file()));
         let dst = metadata.parent().join("bin");
-        try!(check_overwrites(&dst, &pkg, &opts.filter, &list, force));
+        try!(check_overwrites(&dst, pkg, &opts.filter, &list, force));
     }
 
     let mut td_opt = None;
     let target_dir = if source_id.is_path() {
-        config.target_dir(&pkg)
+        config.target_dir(&ws)
     } else {
         if let Ok(td) = TempDir::new("cargo-install") {
             let p = td.path().to_owned();
@@ -100,7 +102,7 @@ pub fn install(root: Option<&str>,
         }
     };
     config.set_target_dir(target_dir.clone());
-    let compile = try!(ops::compile_pkg(&pkg, Some(source), opts).chain_error(|| {
+    let compile = try!(ops::compile_ws(&ws, Some(source), opts).chain_error(|| {
         if let Some(td) = td_opt.take() {
             // preserve the temporary directory, so the user can inspect it
             td.into_path();
@@ -121,7 +123,8 @@ pub fn install(root: Option<&str>,
     let metadata = try!(metadata(config, &root));
     let mut list = try!(read_crate_list(metadata.file()));
     let dst = metadata.parent().join("bin");
-    let duplicates = try!(check_overwrites(&dst, &pkg, &opts.filter, &list, force));
+    let duplicates = try!(check_overwrites(&dst, pkg, &opts.filter,
+                                           &list, force));
 
     try!(fs::create_dir_all(&dst));
 
index 754ef82eaef0537fc51d5f4232e7b06c5ebfb9aa..856f06ba048636bf6897cebe63d789f631d33fe5 100644 (file)
@@ -9,6 +9,7 @@ use git2::Config as GitConfig;
 
 use term::color::BLACK;
 
+use core::Workspace;
 use util::{GitRepo, HgRepo, CargoResult, human, ChainError, internal};
 use util::{Config, paths};
 
@@ -442,10 +443,16 @@ mod tests {
         };
 
         if !fs::metadata(&path_of_source_file).map(|x| x.is_file()).unwrap_or(false) {
-            return paths::write(&path_of_source_file, default_file_content)
+            try!(paths::write(&path_of_source_file, default_file_content));
         }
     }
 
+    if let Err(e) = Workspace::new(&path.join("Cargo.toml"), config) {
+        let msg = format!("compiling this new crate may not work due to invalid \
+                           workspace configuration\n\n{}", e);
+        try!(config.shell().warn(msg));
+    }
+
     Ok(())
 }
 
index d5088bab2a1779a40fe2825fecb8269748f38f4b..39afefc854d0da754e0c85576668cb7083522461 100644 (file)
@@ -1,18 +1,14 @@
-use std::path::Path;
-
 use rustc_serialize::{Encodable, Encoder};
 
 use core::resolver::Resolve;
-use core::{Package, PackageId, PackageSet};
+use core::{Package, PackageId, Workspace};
 use ops;
-use util::config::Config;
 use util::CargoResult;
 
 const VERSION: u32 = 1;
 
-pub struct OutputMetadataOptions<'a> {
+pub struct OutputMetadataOptions {
     pub features: Vec<String>,
-    pub manifest_path: &'a Path,
     pub no_default_features: bool,
     pub no_deps: bool,
     pub version: u32,
@@ -21,32 +17,34 @@ pub struct OutputMetadataOptions<'a> {
 /// Loads the manifest, resolves the dependencies of the project to the concrete
 /// used versions - considering overrides - and writes all dependencies in a JSON
 /// format to stdout.
-pub fn output_metadata(opt: OutputMetadataOptions, config: &Config) -> CargoResult<ExportInfo> {
+pub fn output_metadata(ws: &Workspace,
+                       opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
     if opt.version != VERSION {
         bail!("metadata version {} not supported, only {} is currently supported",
               opt.version, VERSION);
     }
     if opt.no_deps {
-        metadata_no_deps(opt, config)
+        metadata_no_deps(ws, opt)
     } else {
-        metadata_full(opt, config)
+        metadata_full(ws, opt)
     }
 }
 
-fn metadata_no_deps(opt: OutputMetadataOptions, config: &Config) -> CargoResult<ExportInfo> {
-    let root = try!(Package::for_path(opt.manifest_path, config));
+fn metadata_no_deps(ws: &Workspace,
+                    _opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
+    // `no_deps` output reports only the current package and performs no
+    // dependency resolution (hence the unused options and `resolve: None`).
     Ok(ExportInfo {
-        packages: vec![root],
+        packages: vec![try!(ws.current()).clone()],
         resolve: None,
         version: VERSION,
     })
 }
 
-fn metadata_full(opt: OutputMetadataOptions, config: &Config) -> CargoResult<ExportInfo> {
-    let deps = try!(resolve_dependencies(opt.manifest_path,
-                                         config,
-                                         opt.features,
-                                         opt.no_default_features));
+fn metadata_full(ws: &Workspace,
+                 opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
+    let deps = try!(ops::resolve_dependencies(ws,
+                                              None,
+                                              opt.features.clone(),
+                                              opt.no_default_features));
     let (packages, resolve) = deps;
 
     let packages = try!(packages.package_ids()
@@ -100,18 +98,3 @@ impl Encodable for MetadataResolve {
         encodable.encode(s)
     }
 }
-
-/// Loads the manifest and resolves the dependencies of the project to the
-/// concrete used versions. Afterwards available overrides of dependencies are applied.
-fn resolve_dependencies<'a>(manifest: &Path,
-                            config: &'a Config,
-                            features: Vec<String>,
-                            no_default_features: bool)
-                            -> CargoResult<(PackageSet<'a>, Resolve)> {
-    let package = try!(Package::for_path(manifest, config));
-    ops::resolve_dependencies(&package,
-                              config,
-                              None,
-                              features,
-                              no_default_features)
-}
index 77113795bc31dd5c8f149688fade532e14e34833..e00e0b4e4748457fc40d0e08cb5355a5b190add3 100644 (file)
@@ -8,7 +8,7 @@ use flate2::{GzBuilder, Compression};
 use git2;
 use tar::{Archive, Builder, Header};
 
-use core::{SourceId, Package, PackageId};
+use core::{SourceId, Package, PackageId, Workspace, Source};
 use sources::PathSource;
 use util::{self, CargoResult, human, internal, ChainError, Config, FileLock};
 use ops;
@@ -21,16 +21,17 @@ pub struct PackageOpts<'cfg> {
     pub verify: bool,
 }
 
-pub fn package(manifest_path: &Path,
+pub fn package(ws: &Workspace,
                opts: &PackageOpts) -> CargoResult<Option<FileLock>> {
-    let config = opts.config;
-    let path = manifest_path.parent().unwrap();
-    let id = try!(SourceId::for_path(path));
-    let mut src = PathSource::new(path, &id, config);
-    let pkg = try!(src.root_package());
+    let pkg = try!(ws.current());
+    let config = ws.config();
+    let mut src = PathSource::new(pkg.root(),
+                                  pkg.package_id().source_id(),
+                                  config);
+    try!(src.update());
 
     if opts.check_metadata {
-        try!(check_metadata(&pkg, config));
+        try!(check_metadata(pkg, config));
     }
 
     if opts.list {
@@ -50,7 +51,7 @@ pub fn package(manifest_path: &Path,
     }
 
     let filename = format!("{}-{}.crate", pkg.name(), pkg.version());
-    let dir = config.target_dir(&pkg).join("package");
+    let dir = config.target_dir(ws).join("package");
     let mut dst = match dir.open_ro(&filename, config, "packaged crate") {
         Ok(f) => return Ok(Some(f)),
         Err(..) => {
@@ -65,12 +66,12 @@ pub fn package(manifest_path: &Path,
     // it exists.
     try!(config.shell().status("Packaging", pkg.package_id().to_string()));
     try!(dst.file().set_len(0));
-    try!(tar(&pkg, &src, config, dst.file(), &filename).chain_error(|| {
+    try!(tar(ws, &src, dst.file(), &filename).chain_error(|| {
         human("failed to prepare local package for uploading")
     }));
     if opts.verify {
         try!(dst.seek(SeekFrom::Start(0)));
-        try!(run_verify(config, &pkg, dst.file()).chain_error(|| {
+        try!(run_verify(ws, dst.file()).chain_error(|| {
             human("failed to verify package tarball")
         }))
     }
@@ -165,9 +166,8 @@ fn check_not_dirty(p: &Package, src: &PathSource) -> CargoResult<()> {
     }
 }
 
-fn tar(pkg: &Package,
+fn tar(ws: &Workspace,
        src: &PathSource,
-       config: &Config,
        dst: &File,
        filename: &str) -> CargoResult<()> {
     // Prepare the encoder and its header
@@ -177,6 +177,8 @@ fn tar(pkg: &Package,
 
     // Put all package files into a compressed archive
     let mut ar = Builder::new(encoder);
+    let pkg = try!(ws.current());
+    let config = ws.config();
     let root = pkg.root();
     for file in try!(src.list_files(pkg)).iter() {
         let relative = util::without_prefix(&file, &root).unwrap();
@@ -229,10 +231,10 @@ fn tar(pkg: &Package,
     Ok(())
 }
 
-fn run_verify(config: &Config,
-              pkg: &Package,
-              tar: &File)
-              -> CargoResult<()> {
+fn run_verify(ws: &Workspace, tar: &File) -> CargoResult<()> {
+    let config = ws.config();
+    let pkg = try!(ws.current());
+
     try!(config.shell().status("Verifying", pkg));
 
     let f = try!(GzDecoder::new(tar));
@@ -266,7 +268,8 @@ fn run_verify(config: &Config,
     let new_pkg = Package::new(new_manifest, &manifest_path);
 
     // Now that we've rewritten all our path dependencies, compile it!
-    try!(ops::compile_pkg(&new_pkg, None, &ops::CompileOptions {
+    let ws = Workspace::one(new_pkg, config);
+    try!(ops::compile_ws(&ws, None, &ops::CompileOptions {
         config: config,
         jobs: None,
         target: None,
index 6c6b1a88c0e45c9448feab37ae21aaddc5e9c31d..94737d507d41739521fd35e153fb69608905a396 100644 (file)
@@ -1,21 +1,16 @@
-use std::path::Path;
-
 use ops;
-use core::{PackageIdSpec, Package};
-use util::{CargoResult, Config};
+use core::{PackageIdSpec, Workspace};
+use util::CargoResult;
 
-pub fn pkgid(manifest_path: &Path,
-             spec: Option<&str>,
-             config: &Config) -> CargoResult<PackageIdSpec> {
-    let package = try!(Package::for_path(manifest_path, config));
-    let resolve = match try!(ops::load_pkg_lockfile(&package, config)) {
+pub fn pkgid(ws: &Workspace, spec: Option<&str>) -> CargoResult<PackageIdSpec> {
+    let resolve = match try!(ops::load_pkg_lockfile(ws)) {
         Some(resolve) => resolve,
         None => bail!("a Cargo.lock must exist for this command"),
     };
 
     let pkgid = match spec {
         Some(spec) => try!(PackageIdSpec::query_str(spec, resolve.iter())),
-        None => package.package_id(),
+        None => try!(ws.current()).package_id(),
     };
     Ok(PackageIdSpec::from_package_id(pkgid))
 }
index 56ae6a5e3338e911d5324f6e10b2601d51bd5551..119466bd2baa99641f4291963766c37e6c2d2160 100644 (file)
@@ -3,16 +3,19 @@ use std::fs;
 use std::io;
 use std::path::{Path, PathBuf};
 
-use core::{Package, Manifest, SourceId, PackageId};
+use core::{Package, SourceId, PackageId, EitherManifest};
 use util::{self, paths, CargoResult, human, Config, ChainError};
 use util::important_paths::find_project_manifest_exact;
 use util::toml::Layout;
 
-pub fn read_manifest(contents: &[u8], layout: Layout, source_id: &SourceId,
-                     config: &Config)
-                     -> CargoResult<(Manifest, Vec<PathBuf>)> {
+pub fn read_manifest(path: &Path, source_id: &SourceId, config: &Config)
+                     -> CargoResult<(EitherManifest, Vec<PathBuf>)> {
+    trace!("read_package; path={}; source-id={}", path.display(), source_id);
+    let contents = try!(paths::read(path));
+
+    let layout = Layout::from_project_path(path.parent().unwrap());
     let root = layout.root.clone();
-    util::toml::to_manifest(contents, source_id, layout, config).chain_error(|| {
+    util::toml::to_manifest(&contents, source_id, layout, config).chain_error(|| {
         human(format!("failed to parse manifest at `{}`",
                       root.join("Cargo.toml").display()))
     })
@@ -21,11 +24,14 @@ pub fn read_manifest(contents: &[u8], layout: Layout, source_id: &SourceId,
 pub fn read_package(path: &Path, source_id: &SourceId, config: &Config)
                     -> CargoResult<(Package, Vec<PathBuf>)> {
     trace!("read_package; path={}; source-id={}", path.display(), source_id);
-    let data = try!(paths::read(path));
-
-    let layout = Layout::from_project_path(path.parent().unwrap());
-    let (manifest, nested) =
-        try!(read_manifest(data.as_bytes(), layout, source_id, config));
+    let (manifest, nested) = try!(read_manifest(path, source_id, config));
+    let manifest = match manifest {
+        EitherManifest::Real(manifest) => manifest,
+        EitherManifest::Virtual(..) => {
+            bail!("found a virtual manifest at `{}` instead of a package \
+                   manifest", path.display())
+        }
+    };
 
     Ok((Package::new(manifest, path), nested))
 }
index 6764118f92435810a68ce4f936749aa9361dc198..9d53246e90db0f9118e6648dc289f466a5629fde 100644 (file)
@@ -2,13 +2,13 @@ use std::path::Path;
 
 use ops::{self, CompileFilter};
 use util::{self, CargoResult, process, ProcessError};
-use core::Package;
+use core::Workspace;
 
-pub fn run(manifest_path: &Path,
+pub fn run(ws: &Workspace,
            options: &ops::CompileOptions,
            args: &[String]) -> CargoResult<Option<ProcessError>> {
-    let config = options.config;
-    let root = try!(Package::for_path(manifest_path, config));
+    let config = ws.config();
+    let root = try!(ws.current());
 
     let mut bins = root.manifest().targets().iter().filter(|a| {
         !a.is_lib() && !a.is_custom_build() && match options.filter {
@@ -40,7 +40,7 @@ pub fn run(manifest_path: &Path,
         }
     }
 
-    let compile = try!(ops::compile(manifest_path, options));
+    let compile = try!(ops::compile(ws, options));
     let exe = &compile.binaries[0];
     let exe = match util::without_prefix(&exe, config.cwd()) {
         Some(path) if path.file_name() == Some(path.as_os_str())
index 978090658fe05041a265db81211d7c66478f7c4d..a7575e7fdcec1216bfa54b5926fac58d9dfa7fad 100644 (file)
@@ -49,7 +49,7 @@ use std::fs;
 use std::io;
 use std::path::{PathBuf, Path};
 
-use core::{Package, Target};
+use core::{Package, Target, Workspace};
 use util::{Config, FileLock, CargoResult, Filesystem};
 use util::hex::short_hash;
 
@@ -69,11 +69,10 @@ pub struct LayoutProxy<'a> {
 }
 
 impl Layout {
-    pub fn new(config: &Config,
-               pkg: &Package,
+    pub fn new(ws: &Workspace,
                triple: Option<&str>,
                dest: &str) -> CargoResult<Layout> {
-        let mut path = config.target_dir(pkg);
+        let mut path = ws.config().target_dir(ws);
         // Flexible target specifications often point at filenames, so interpret
         // the target triple as a Path and then just use the file stem as the
         // component for the directory name.
@@ -81,7 +80,7 @@ impl Layout {
             path.push(Path::new(triple).file_stem().unwrap());
         }
         path.push(dest);
-        Layout::at(config, path)
+        Layout::at(ws.config(), path)
     }
 
     pub fn at(config: &Config, root: Filesystem) -> CargoResult<Layout> {
index ab0b829626a559f0c1c708059e1769deb1b1049a..0c23b277bc9df54c7d704ca7f3c1cbe66b2fd6c8 100644 (file)
@@ -6,7 +6,7 @@ use std::path::{self, PathBuf};
 use std::sync::Arc;
 
 use core::{Package, PackageId, PackageSet, Target, Resolve};
-use core::{Profile, Profiles};
+use core::{Profile, Profiles, Workspace};
 use core::shell::ColorConfig;
 use util::{self, CargoResult, human};
 use util::{Config, internal, ChainError, profile, join_paths};
@@ -56,7 +56,8 @@ pub type PackagesToBuild<'a> = [(&'a Package, Vec<(&'a Target,&'a Profile)>)];
 
 // Returns a mapping of the root package plus its immediate dependencies to
 // where the compiled libraries are all located.
-pub fn compile_targets<'a, 'cfg: 'a>(pkg_targets: &'a PackagesToBuild<'a>,
+pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>,
+                                     pkg_targets: &'a PackagesToBuild<'a>,
                                      packages: &'a PackageSet<'cfg>,
                                      resolve: &'a Resolve,
                                      config: &'cfg Config,
@@ -81,10 +82,10 @@ pub fn compile_targets<'a, 'cfg: 'a>(pkg_targets: &'a PackagesToBuild<'a>,
 
     let dest = if build_config.release {"release"} else {"debug"};
     let root = try!(packages.get(resolve.root()));
-    let host_layout = try!(Layout::new(config, root, None, &dest));
+    let host_layout = try!(Layout::new(ws, None, &dest));
     let target_layout = match build_config.requested_target.as_ref() {
         Some(target) => {
-            Some(try!(layout::Layout::new(config, root, Some(&target), &dest)))
+            Some(try!(layout::Layout::new(ws, Some(&target), &dest)))
         }
         None => None,
     };
index 9c0bc25e186c2bbf9ad6cd4bdd0eeaf6b2e43442..3e22a7e17b6e153a120965a15e751c42651e8578 100644 (file)
@@ -1,8 +1,8 @@
 use std::ffi::{OsString, OsStr};
-use std::path::Path;
 
 use ops::{self, ExecEngine, ProcessEngine, Compilation};
 use util::{self, CargoResult, CargoTestError, ProcessError};
+use core::Workspace;
 
 pub struct TestOptions<'a> {
     pub compile_opts: ops::CompileOptions<'a>,
@@ -11,10 +11,10 @@ pub struct TestOptions<'a> {
     pub only_doc: bool,
 }
 
-pub fn run_tests(manifest_path: &Path,
+pub fn run_tests(ws: &Workspace,
                  options: &TestOptions,
                  test_args: &[String]) -> CargoResult<Option<CargoTestError>> {
-    let compilation = try!(compile_tests(manifest_path, options));
+    let compilation = try!(compile_tests(ws, options));
 
     if options.no_run {
         return Ok(None)
@@ -47,12 +47,12 @@ pub fn run_tests(manifest_path: &Path,
     }
 }
 
-pub fn run_benches(manifest_path: &Path,
+pub fn run_benches(ws: &Workspace,
                    options: &TestOptions,
                    args: &[String]) -> CargoResult<Option<CargoTestError>> {
     let mut args = args.to_vec();
     args.push("--bench".to_string());
-    let compilation = try!(compile_tests(manifest_path, options));
+    let compilation = try!(compile_tests(ws, options));
 
     if options.no_run {
         return Ok(None)
@@ -64,11 +64,10 @@ pub fn run_benches(manifest_path: &Path,
     }
 }
 
-fn compile_tests<'a>(manifest_path: &Path,
+fn compile_tests<'a>(ws: &Workspace<'a>,
                      options: &TestOptions<'a>)
                      -> CargoResult<Compilation<'a>> {
-    let mut compilation = try!(ops::compile(manifest_path,
-                                            &options.compile_opts));
+    let mut compilation = try!(ops::compile(ws, &options.compile_opts));
     compilation.tests.sort_by(|a, b| {
         (a.0.package_id(), &a.1).cmp(&(b.0.package_id(), &b.1))
     });
index f24fe5bd5713a95923b5ca4dfac486a4d77cd949..1c3a1f1087fec827acf26a9a60c48b0a2a4243d8 100644 (file)
@@ -3,18 +3,17 @@ use std::io::prelude::*;
 use rustc_serialize::{Encodable, Decodable};
 use toml::{self, Encoder, Value};
 
-use core::{Resolve, resolver, Package};
-use util::{CargoResult, ChainError, human, Config, Filesystem};
+use core::{Resolve, resolver, Workspace};
+use util::{CargoResult, ChainError, human, Filesystem};
 use util::toml as cargo_toml;
 
-pub fn load_pkg_lockfile(pkg: &Package, config: &Config)
-                         -> CargoResult<Option<Resolve>> {
-    if !pkg.root().join("Cargo.lock").exists() {
+pub fn load_pkg_lockfile(ws: &Workspace) -> CargoResult<Option<Resolve>> {
+    if !ws.root().join("Cargo.lock").exists() {
         return Ok(None)
     }
 
-    let root = Filesystem::new(pkg.root().to_path_buf());
-    let mut f = try!(root.open_ro("Cargo.lock", config, "Cargo.lock file"));
+    let root = Filesystem::new(ws.root().to_path_buf());
+    let mut f = try!(root.open_ro("Cargo.lock", ws.config(), "Cargo.lock file"));
 
     let mut s = String::new();
     try!(f.read_to_string(&mut s).chain_error(|| {
@@ -22,18 +21,17 @@ pub fn load_pkg_lockfile(pkg: &Package, config: &Config)
     }));
 
     (|| {
-        let table = toml::Value::Table(try!(cargo_toml::parse(&s, f.path(), config)));
+        let table = try!(cargo_toml::parse(&s, f.path(), ws.config()));
+        let table = toml::Value::Table(table);
         let mut d = toml::Decoder::new(table);
         let v: resolver::EncodableResolve = try!(Decodable::decode(&mut d));
-        Ok(Some(try!(v.to_resolve(pkg, config))))
+        Ok(Some(try!(v.to_resolve(ws))))
     }).chain_error(|| {
         human(format!("failed to parse lock file at: {}", f.path().display()))
     })
 }
 
-pub fn write_pkg_lockfile(pkg: &Package,
-                          resolve: &Resolve,
-                          config: &Config) -> CargoResult<()> {
+pub fn write_pkg_lockfile(ws: &Workspace, resolve: &Resolve) -> CargoResult<()> {
     let mut e = Encoder::new();
     resolve.encode(&mut e).unwrap();
 
@@ -63,13 +61,13 @@ pub fn write_pkg_lockfile(pkg: &Package,
         None => {}
     }
 
-    let root = Filesystem::new(pkg.root().to_path_buf());
+    let root = Filesystem::new(ws.root().to_path_buf());
 
     // Load the original lockfile if it exists.
     //
     // If the lockfile contents haven't changed so don't rewrite it. This is
     // helpful on read-only filesystems.
-    let orig = root.open_ro("Cargo.lock", config, "Cargo.lock file");
+    let orig = root.open_ro("Cargo.lock", ws.config(), "Cargo.lock file");
     let orig = orig.and_then(|mut f| {
         let mut s = String::new();
         try!(f.read_to_string(&mut s));
@@ -85,13 +83,13 @@ pub fn write_pkg_lockfile(pkg: &Package,
     }
 
     // Ok, if that didn't work just write it out
-    root.open_rw("Cargo.lock", config, "Cargo.lock file").and_then(|mut f| {
+    root.open_rw("Cargo.lock", ws.config(), "Cargo.lock file").and_then(|mut f| {
         try!(f.file().set_len(0));
         try!(f.write_all(out.as_bytes()));
         Ok(())
     }).chain_error(|| {
         human(format!("failed to write {}",
-                      pkg.root().join("Cargo.lock").display()))
+                      ws.root().join("Cargo.lock").display()))
     })
 }
 
index d6e41a2d35faffe522b3aa83985f1ac79cbbb729..48edabd94623b99e3549c3850ed42c89b50209b1 100644 (file)
@@ -1,5 +1,5 @@
 pub use self::cargo_clean::{clean, CleanOptions};
-pub use self::cargo_compile::{compile, compile_pkg, resolve_dependencies, CompileOptions};
+pub use self::cargo_compile::{compile, compile_ws, resolve_dependencies, CompileOptions};
 pub use self::cargo_compile::{CompileFilter, CompileMode};
 pub use self::cargo_read_manifest::{read_manifest,read_package,read_packages};
 pub use self::cargo_rustc::{compile_targets, Compilation, Layout, Kind, Unit};
@@ -21,7 +21,7 @@ pub use self::registry::{registry_login, search, http_proxy_exists, http_handle}
 pub use self::registry::{modify_owners, yank, OwnersOptions, PublishOpts};
 pub use self::cargo_fetch::{fetch, get_resolved_packages};
 pub use self::cargo_pkgid::pkgid;
-pub use self::resolve::{resolve_pkg, resolve_with_previous};
+pub use self::resolve::{resolve_ws, resolve_with_previous};
 pub use self::cargo_output_metadata::{output_metadata, OutputMetadataOptions, ExportInfo};
 
 mod cargo_clean;
index a802165f728b436140379f8fceeaaa5072e69dc7..29d54f2b499730a1bf87f222cd96dcb6b70338c7 100644 (file)
@@ -2,7 +2,7 @@ use std::collections::HashMap;
 use std::env;
 use std::fs::{self, File};
 use std::iter::repeat;
-use std::path::{Path, PathBuf};
+use std::path::PathBuf;
 use std::time::Duration;
 
 use curl::easy::Easy;
@@ -13,7 +13,7 @@ use term::color::BLACK;
 use url::percent_encoding::{percent_encode, QUERY_ENCODE_SET};
 
 use core::source::Source;
-use core::{Package, SourceId};
+use core::{Package, SourceId, Workspace};
 use core::dependency::Kind;
 use core::manifest::ManifestMetadata;
 use ops;
@@ -37,8 +37,8 @@ pub struct PublishOpts<'cfg> {
     pub allow_dirty: bool,
 }
 
-pub fn publish(manifest_path: &Path, opts: &PublishOpts) -> CargoResult<()> {
-    let pkg = try!(Package::for_path(&manifest_path, opts.config));
+pub fn publish(ws: &Workspace, opts: &PublishOpts) -> CargoResult<()> {
+    let pkg = try!(ws.current());
 
     if !pkg.publish() {
         bail!("some crates cannot be published.\n\
@@ -52,7 +52,7 @@ pub fn publish(manifest_path: &Path, opts: &PublishOpts) -> CargoResult<()> {
 
     // Prepare a tarball, with a non-surpressable warning if metadata
     // is missing since this is being put online.
-    let tarball = try!(ops::package(manifest_path, &ops::PackageOpts {
+    let tarball = try!(ops::package(ws, &ops::PackageOpts {
         config: opts.config,
         verify: opts.verify,
         list: false,
index 5f745465598f471e026218549e085d04c315b12d..7a6abacab006604cf200d8fe309848b88214b434 100644 (file)
@@ -1,26 +1,24 @@
 use std::collections::{HashMap, HashSet};
 
-use core::{Package, PackageId, SourceId};
+use core::{PackageId, SourceId, Workspace};
 use core::registry::PackageRegistry;
 use core::resolver::{self, Resolve, Method};
 use ops;
-use util::{CargoResult, Config};
+use util::CargoResult;
 
 /// Resolve all dependencies for the specified `package` using the previous
 /// lockfile as a guide if present.
 ///
 /// This function will also write the result of resolution as a new
 /// lockfile.
-pub fn resolve_pkg(registry: &mut PackageRegistry,
-                   package: &Package,
-                   config: &Config)
+pub fn resolve_ws(registry: &mut PackageRegistry, ws: &Workspace)
                    -> CargoResult<Resolve> {
-    let prev = try!(ops::load_pkg_lockfile(package, config));
-    let resolve = try!(resolve_with_previous(registry, package,
+    let prev = try!(ops::load_pkg_lockfile(ws));
+    let resolve = try!(resolve_with_previous(registry, ws,
                                              Method::Everything,
                                              prev.as_ref(), None));
-    if package.package_id().source_id().is_path() {
-        try!(ops::write_pkg_lockfile(package, &resolve, config));
+    if try!(ws.current()).package_id().source_id().is_path() {
+        try!(ops::write_pkg_lockfile(ws, &resolve));
     }
     Ok(resolve)
 }
@@ -35,14 +33,11 @@ pub fn resolve_pkg(registry: &mut PackageRegistry,
 /// The previous resolve normally comes from a lockfile. This function does not
 /// read or write lockfiles from the filesystem.
 pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry,
-                                 package: &Package,
+                                 ws: &Workspace,
                                  method: Method,
                                  previous: Option<&'a Resolve>,
                                  to_avoid: Option<&HashSet<&'a PackageId>>)
                                  -> CargoResult<Resolve> {
-    try!(registry.add_sources(&[package.package_id().source_id()
-                                       .clone()]));
-
     // Here we place an artificial limitation that all non-registry sources
     // cannot be locked at more than one revision. This means that if a git
     // repository provides more than one package, they must all be updated in
@@ -57,53 +52,89 @@ pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry,
                                         .filter(|s| !s.is_registry()));
     }
 
-    let summary = package.summary().clone();
-    let (summary, replace) = match previous {
-        Some(r) => {
-            // In the case where a previous instance of resolve is available, we
-            // want to lock as many packages as possible to the previous version
-            // without disturbing the graph structure. To this end we perform
-            // two actions here:
-            //
-            // 1. We inform the package registry of all locked packages. This
-            //    involves informing it of both the locked package's id as well
-            //    as the versions of all locked dependencies. The registry will
-            //    then takes this information into account when it is queried.
-            //
-            // 2. The specified package's summary will have its dependencies
-            //    modified to their precise variants. This will instruct the
-            //    first step of the resolution process to not query for ranges
-            //    but rather for precise dependency versions.
-            //
-            //    This process must handle altered dependencies, however, as
-            //    it's possible for a manifest to change over time to have
-            //    dependencies added, removed, or modified to different version
-            //    ranges. To deal with this, we only actually lock a dependency
-            //    to the previously resolved version if the dependency listed
-            //    still matches the locked version.
-            for node in r.iter().filter(|p| keep(p, to_avoid, &to_avoid_sources)) {
-                let deps = r.deps_not_replaced(node)
-                            .filter(|p| keep(p, to_avoid, &to_avoid_sources))
-                            .cloned().collect();
-                registry.register_lock(node.clone(), deps);
+    let mut summaries = Vec::new();
+    for member in ws.members() {
+        try!(registry.add_sources(&[member.package_id().source_id()
+                                          .clone()]));
+
+        // If we're resolving everything then we include all members of the
+        // workspace. If we want a specific set of requirements then we only
+        // resolve the main crate as it's the only one we're compiling. This
+        // case should only happen after we have a previous resolution, however,
+        // so we assert that a previous resolution exists.
+        let method = match method {
+            Method::Everything => Method::Everything,
+            Method::Required { .. } => {
+                assert!(previous.is_some());
+                if member.package_id() == try!(ws.current()).package_id() {
+                    method
+                } else {
+                    continue
+                }
             }
+        };
 
-            let summary = {
-                let map = r.deps_not_replaced(r.root()).filter(|p| {
-                    keep(p, to_avoid, &to_avoid_sources)
-                }).map(|d| {
-                    (d.name(), d)
-                }).collect::<HashMap<_, _>>();
+        // If we don't have a previous instance of resolve then we just need to
+        // resolve our entire summary (method should be Everything) and we just
+        // move along to the next member.
+        let r = match previous {
+            Some(r) => r,
+            None => {
+                summaries.push((member.summary().clone(), method));
+                continue
+            }
+        };
 
-                summary.map_dependencies(|dep| {
-                    match map.get(dep.name()) {
-                        Some(&lock) if dep.matches_id(lock) => dep.lock_to(lock),
-                        _ => dep,
-                    }
-                })
-            };
-            let replace = package.manifest().replace();
-            let replace = replace.iter().map(|&(ref spec, ref dep)| {
+        // In the case where a previous instance of resolve is available, we
+        // want to lock as many packages as possible to the previous version
+        // without disturbing the graph structure. To this end we perform
+        // two actions here:
+        //
+        // 1. We inform the package registry of all locked packages. This
+        //    involves informing it of both the locked package's id as well
+        //    as the versions of all locked dependencies. The registry will
+        //    then take this information into account when it is queried.
+        //
+        // 2. The specified package's summary will have its dependencies
+        //    modified to their precise variants. This will instruct the
+        //    first step of the resolution process to not query for ranges
+        //    but rather for precise dependency versions.
+        //
+        //    This process must handle altered dependencies, however, as
+        //    it's possible for a manifest to change over time to have
+        //    dependencies added, removed, or modified to different version
+        //    ranges. To deal with this, we only actually lock a dependency
+        //    to the previously resolved version if the dependency listed
+        //    still matches the locked version.
+        for node in r.iter().filter(|p| keep(p, to_avoid, &to_avoid_sources)) {
+            let deps = r.deps_not_replaced(node)
+                        .filter(|p| keep(p, to_avoid, &to_avoid_sources))
+                        .cloned().collect();
+            registry.register_lock(node.clone(), deps);
+        }
+
+        let summary = {
+            let map = r.deps_not_replaced(member.package_id()).filter(|p| {
+                keep(p, to_avoid, &to_avoid_sources)
+            }).map(|d| {
+                (d.name(), d)
+            }).collect::<HashMap<_, _>>();
+
+            member.summary().clone().map_dependencies(|dep| {
+                match map.get(dep.name()) {
+                    Some(&lock) if dep.matches_id(lock) => dep.lock_to(lock),
+                    _ => dep,
+                }
+            })
+        };
+        summaries.push((summary, method));
+    }
+
+    let root_replace = ws.root_replace();
+
+    let replace = match previous {
+        Some(r) => {
+            root_replace.iter().map(|&(ref spec, ref dep)| {
                 for (key, val) in r.replacements().iter() {
                     if spec.matches(key) &&
                        dep.matches_id(val) &&
@@ -112,13 +143,14 @@ pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry,
                     }
                 }
                 (spec.clone(), dep.clone())
-            }).collect::<Vec<_>>();
-            (summary, replace)
+            }).collect::<Vec<_>>()
         }
-        None => (summary, package.manifest().replace().to_owned()),
+        None => root_replace.to_vec(),
     };
 
-    let mut resolved = try!(resolver::resolve(&summary, &method, &replace,
+    let mut resolved = try!(resolver::resolve(try!(ws.current()).package_id(),
+                                              &summaries,
+                                              &replace,
                                               registry));
     if let Some(previous) = previous {
         resolved.copy_metadata(previous);
index 5c753b5469d125c0660a572b9509e412e7f70aaa..20118821c20252cf1ce0f6aa5776950c33fd46b9 100644 (file)
@@ -79,7 +79,9 @@ pub fn canonicalize_url(url: &Url) -> Url {
     let mut url = url.clone();
 
     // Strip a trailing slash
-    url.path_segments_mut().unwrap().pop_if_empty();
+    if url.path().ends_with("/") {
+        url.path_segments_mut().unwrap().pop_if_empty();
+    }
 
     // HACKHACK: For github URL's specifically just lowercase
     // everything.  GitHub treats both the same, but they hash
index 2ca6a2001c0ada32840b6ef9c98c0808acc14ce4..a89f010e85ac41c5a84c47a14a37c71cf25990dd 100644 (file)
@@ -13,7 +13,7 @@ use std::str::FromStr;
 use rustc_serialize::{Encodable,Encoder};
 use toml;
 use core::shell::{Verbosity, ColorConfig};
-use core::{MultiShell, Package};
+use core::{MultiShell, Workspace};
 use util::{CargoResult, CargoError, ChainError, Rustc, internal, human};
 use util::Filesystem;
 
@@ -112,9 +112,9 @@ impl Config {
 
     pub fn cwd(&self) -> &Path { &self.cwd }
 
-    pub fn target_dir(&self, pkg: &Package) -> Filesystem {
+    pub fn target_dir(&self, ws: &Workspace) -> Filesystem {
         self.target_dir.borrow().clone().unwrap_or_else(|| {
-            Filesystem::new(pkg.root().join("target"))
+            Filesystem::new(ws.root().join("target"))
         })
     }
 
index 61baa2f32281257273e317404720100c4849db92..675daa0fd44d50f851a27b1775e2a86ca4e2e7c3 100644 (file)
@@ -9,9 +9,9 @@ use toml;
 use semver::{self, VersionReq};
 use rustc_serialize::{Decodable, Decoder};
 
-use core::{SourceId, Profiles, PackageIdSpec};
-use core::{Summary, Manifest, Target, Dependency, DependencyInner, PackageId,
-           GitReference};
+use core::{SourceId, Profiles, PackageIdSpec, GitReference, WorkspaceConfig};
+use core::{Summary, Manifest, Target, Dependency, DependencyInner, PackageId};
+use core::{EitherManifest, VirtualManifest};
 use core::dependency::{Kind, Platform};
 use core::manifest::{LibKind, Profile, ManifestMetadata};
 use core::package_id::Metadata;
@@ -101,37 +101,41 @@ fn try_add_files(files: &mut Vec<PathBuf>, root: PathBuf) {
     }
 }
 
-pub fn to_manifest(contents: &[u8],
+pub fn to_manifest(contents: &str,
                    source_id: &SourceId,
                    layout: Layout,
                    config: &Config)
-                   -> CargoResult<(Manifest, Vec<PathBuf>)> {
+                   -> CargoResult<(EitherManifest, Vec<PathBuf>)> {
     let manifest = layout.root.join("Cargo.toml");
     let manifest = match util::without_prefix(&manifest, config.cwd()) {
         Some(path) => path.to_path_buf(),
         None => manifest.clone(),
     };
-    let contents = try!(str::from_utf8(contents).map_err(|_| {
-        human(format!("{} is not valid UTF-8", manifest.display()))
-    }));
     let root = try!(parse(contents, &manifest, config));
     let mut d = toml::Decoder::new(toml::Value::Table(root));
     let manifest: TomlManifest = try!(Decodable::decode(&mut d).map_err(|e| {
         human(e.to_string())
     }));
 
-    let pair = try!(manifest.to_manifest(source_id, &layout, config));
-    let (mut manifest, paths) = pair;
-    match d.toml {
-        Some(ref toml) => add_unused_keys(&mut manifest, toml, "".to_string()),
-        None => {}
-    }
-    if !manifest.targets().iter().any(|t| !t.is_custom_build()) {
-        bail!("no targets specified in the manifest\n  \
-               either src/lib.rs, src/main.rs, a [lib] section, or [[bin]] \
-               section must be present")
-    }
-    return Ok((manifest, paths));
+    return match manifest.to_real_manifest(source_id, &layout, config) {
+        Ok((mut manifest, paths)) => {
+            if let Some(ref toml) = d.toml {
+                add_unused_keys(&mut manifest, toml, String::new());
+            }
+            if !manifest.targets().iter().any(|t| !t.is_custom_build()) {
+                bail!("no targets specified in the manifest\n  \
+                       either src/lib.rs, src/main.rs, a [lib] section, or \
+                       [[bin]] section must be present")
+            }
+            Ok((EitherManifest::Real(manifest), paths))
+        }
+        Err(e) => {
+            match manifest.to_virtual_manifest(source_id, &layout, config) {
+                Ok((m, paths)) => Ok((EitherManifest::Virtual(m), paths)),
+                Err(..) => Err(e),
+            }
+        }
+    };
 
     fn add_unused_keys(m: &mut Manifest, toml: &toml::Value, key: String) {
         if key == "package.metadata" {
@@ -240,6 +244,7 @@ pub struct TomlManifest {
     features: Option<HashMap<String, Vec<String>>>,
     target: Option<HashMap<String, TomlPlatform>>,
     replace: Option<HashMap<String, TomlDependency>>,
+    workspace: Option<TomlWorkspace>,
 }
 
 #[derive(RustcDecodable, Clone, Default)]
@@ -272,6 +277,7 @@ pub struct TomlProject {
     exclude: Option<Vec<String>>,
     include: Option<Vec<String>>,
     publish: Option<bool>,
+    workspace: Option<String>,
 
     // package metadata
     description: Option<String>,
@@ -284,6 +290,11 @@ pub struct TomlProject {
     repository: Option<String>,
 }
 
+#[derive(RustcDecodable)]
+pub struct TomlWorkspace {
+    members: Option<Vec<String>>,
+}
+
 pub struct TomlVersion {
     version: semver::Version,
 }
@@ -387,9 +398,11 @@ fn inferred_bench_targets(layout: &Layout) -> Vec<TomlTarget> {
 }
 
 impl TomlManifest {
-    pub fn to_manifest(&self, source_id: &SourceId, layout: &Layout,
-                       config: &Config)
-        -> CargoResult<(Manifest, Vec<PathBuf>)> {
+    fn to_real_manifest(&self,
+                        source_id: &SourceId,
+                        layout: &Layout,
+                        config: &Config)
+                        -> CargoResult<(Manifest, Vec<PathBuf>)> {
         let mut nested_paths = vec![];
         let mut warnings = vec![];
 
@@ -523,7 +536,7 @@ impl TomlManifest {
         }
 
         let mut deps = Vec::new();
-        let mut replace = Vec::new();
+        let replace;
 
         {
 
@@ -560,35 +573,7 @@ impl TomlManifest {
                 }
             }
 
-            if let Some(ref map) = self.replace {
-                for (spec, replacement) in map {
-                    let spec = try!(PackageIdSpec::parse(spec));
-
-                    let version_specified = match *replacement {
-                        TomlDependency::Detailed(ref d) => d.version.is_some(),
-                        TomlDependency::Simple(..) => true,
-                    };
-                    if version_specified {
-                        bail!("replacements cannot specify a version \
-                               requirement, but found one for `{}`", spec);
-                    }
-
-                    let dep = try!(replacement.to_dependency(spec.name(),
-                                                             &mut cx,
-                                                             None));
-                    let dep = {
-                        let version = try!(spec.version().chain_error(|| {
-                            human(format!("replacements must specify a version \
-                                           to replace, but `{}` does not",
-                                          spec))
-                        }));
-                        let req = VersionReq::exact(version);
-                        dep.clone_inner().set_version_req(req)
-                           .into_dependency()
-                    };
-                    replace.push((spec, dep));
-                }
-            }
+            replace = try!(self.replace(&mut cx));
         }
 
         {
@@ -620,6 +605,20 @@ impl TomlManifest {
             repository: project.repository.clone(),
             keywords: project.keywords.clone().unwrap_or(Vec::new()),
         };
+
+        let workspace_config = match (self.workspace.as_ref(),
+                                      project.workspace.as_ref()) {
+            (Some(config), None) => {
+                WorkspaceConfig::Root { members: config.members.clone() }
+            }
+            (None, root) => {
+                WorkspaceConfig::Member { root: root.cloned() }
+            }
+            (Some(..), Some(..)) => {
+                bail!("cannot configure both `package.workspace` and \
+                       `[workspace]`, only one can be specified")
+            }
+        };
         let profiles = build_profiles(&self.profile);
         let publish = project.publish.unwrap_or(true);
         let mut manifest = Manifest::new(summary,
@@ -630,7 +629,8 @@ impl TomlManifest {
                                          metadata,
                                          profiles,
                                          publish,
-                                         replace);
+                                         replace,
+                                         workspace_config);
         if project.license_file.is_some() && project.license.is_some() {
             manifest.add_warning(format!("only one of `license` or \
                                           `license-file` is necessary"));
@@ -641,6 +641,92 @@ impl TomlManifest {
 
         Ok((manifest, nested_paths))
     }
+
+    fn to_virtual_manifest(&self,
+                           source_id: &SourceId,
+                           layout: &Layout,
+                           config: &Config)
+                           -> CargoResult<(VirtualManifest, Vec<PathBuf>)> {
+        if self.project.is_some() {
+            bail!("virtual manifests do not define [project]");
+        }
+        if self.package.is_some() {
+            bail!("virtual manifests do not define [package]");
+        }
+        if self.lib.is_some() {
+            bail!("virtual manifests do not specifiy [lib]");
+        }
+        if self.bin.is_some() {
+            bail!("virtual manifests do not specifiy [[bin]]");
+        }
+        if self.example.is_some() {
+            bail!("virtual manifests do not specifiy [[example]]");
+        }
+        if self.test.is_some() {
+            bail!("virtual manifests do not specifiy [[test]]");
+        }
+        if self.bench.is_some() {
+            bail!("virtual manifests do not specifiy [[bench]]");
+        }
+
+        let mut nested_paths = Vec::new();
+        let mut warnings = Vec::new();
+        let mut deps = Vec::new();
+        let replace = try!(self.replace(&mut Context {
+            deps: &mut deps,
+            source_id: source_id,
+            nested_paths: &mut nested_paths,
+            config: config,
+            warnings: &mut warnings,
+            platform: None,
+            layout: layout,
+        }));
+        let workspace_config = match self.workspace {
+            Some(ref config) => {
+                WorkspaceConfig::Root { members: config.members.clone() }
+            }
+            None => {
+                bail!("virtual manifests must be configured with [workspace]");
+            }
+        };
+        Ok((VirtualManifest::new(replace, workspace_config), nested_paths))
+    }
+
+    fn replace(&self, cx: &mut Context)
+               -> CargoResult<Vec<(PackageIdSpec, Dependency)>> {
+        let map = match self.replace {
+            Some(ref map) => map,
+            None => return Ok(Vec::new()),
+        };
+
+        let mut replace = Vec::new();
+        for (spec, replacement) in map {
+            let spec = try!(PackageIdSpec::parse(spec));
+
+            let version_specified = match *replacement {
+                TomlDependency::Detailed(ref d) => d.version.is_some(),
+                TomlDependency::Simple(..) => true,
+            };
+            if version_specified {
+                bail!("replacements cannot specify a version \
+                       requirement, but found one for `{}`", spec);
+            }
+
+            let dep = try!(replacement.to_dependency(spec.name(), cx, None));
+            let dep = {
+                let version = try!(spec.version().chain_error(|| {
+                    human(format!("replacements must specify a version \
+                                   to replace, but `{}` does not",
+                                  spec))
+                }));
+                let req = VersionReq::exact(version);
+                dep.clone_inner().set_version_req(req)
+                   .into_dependency()
+            };
+            replace.push((spec, dep));
+        }
+        Ok(replace)
+    }
 }
 
 /// Will check a list of toml targets, and make sure the target names are unique within a vector.
index 4dcb7609a9aacee56ab3f777c299ee86eb1512f4..39930b6a2b7cc6b99b16b11800fdd58e3d0b46b2 100644 (file)
@@ -82,6 +82,20 @@ repository by mistake.
 publish = false
 ```
 
+## The `workspace` Field (optional)
+
+The `workspace` field can be used to configure the workspace that this package
+will be a member of. If not specified, this will be inferred as the first
+`Cargo.toml` with `[workspace]` upwards in the filesystem.
+
+```toml
+[package]
+# ...
+workspace = "path/to/root"
+```
+
+For more information, see the documentation for the workspace table below.
+
 ## Package Metadata
 
 There are a number of optional metadata fields also accepted under the
@@ -341,6 +355,53 @@ In almost all cases, it is an antipattern to use these features outside of
 high-level packages that are designed for curation. If a feature is optional, it
 can almost certainly be expressed as a separate package.
 
+# The `[workspace]` Section
+
+Projects can define a workspace which is a set of crates that will all share the
+same `Cargo.lock` and output directory. The `[workspace]` table can be defined
+as:
+
+```toml
+[workspace]
+
+# Optional key, inferred if not present
+members = ["path/to/member1", "path/to/member2"]
+```
+
+Workspaces were added to Cargo as part of [RFC 1525] and have a number of
+properties:
+
+* A workspace can contain multiple crates where one of them is the root crate.
+* The root crate's `Cargo.toml` contains the `[workspace]` table, but is not
+  required to have other configuration.
+* Whenever any crate in the workspace is compiled, output is placed next to the
+  root crate's `Cargo.toml`.
+* The lock file for all crates in the workspace resides next to the root crate's
+  `Cargo.toml`.
+* The `[replace]` section in `Cargo.toml` is only recognized at the workspace
+  root crate, it's ignored in member crates' manifests.
+
+[RFC 1525]: https://github.com/rust-lang/rfcs/blob/master/text/1525-cargo-workspace.md
+
+The root crate of a workspace, indicated by the presence of `[workspace]` in
+its manifest, is responsible for defining the entire workspace (listing all
+members). This can be done through the `members` key, and if it is omitted then
+members are implicitly included through all `path` dependencies. Note that
+members of the workspace listed explicitly will also have their path
+dependencies included in the workspace.
+
+The `package.workspace` manifest key (described above) is used in member crates
+to point at a workspace's root crate. If this key is omitted then it is inferred
+to be the first crate whose manifest contains `[workspace]` upwards in the
+filesystem.
+
+A crate may either specify `package.workspace` or specify `[workspace]`. That
+is, a crate cannot both be a root crate in a workspace (contain `[workspace]`)
+and also be a member crate of another workspace (contain `package.workspace`).
+
+Most of the time workspaces will not need to be dealt with as `cargo new` and
+`cargo init` will handle workspace configuration automatically.
+
 # The Project Layout
 
 If your project is an executable, name the main source file `src/main.rs`. If it
index 0594aef9b997816ec173730ea589b6c0a8809046..1e81362f1ee2e0f0a4813db24da18564ce5462e2 100644 (file)
@@ -1 +1 @@
-2016-06-21
+2016-07-04
index 47ae3f4ebd84f91b053adbdc27fa5aafce1c8179..c2548f69d54490cd1c6ec33d3add1fe5bd7d8018 100644 (file)
@@ -525,6 +525,7 @@ impl ham::Matcher<ProcessBuilder> for Execs {
 
 impl<'a> ham::Matcher<&'a mut ProcessBuilder> for Execs {
     fn matches(&self, process: &'a mut ProcessBuilder) -> ham::MatchResult {
+        println!("running {}", process);
         let res = process.exec_with_output();
 
         match res {
index d4246fa8234a6a2e28cf3b97176d4ef72608c6cd..020ea94a660ca03ecf525d35ba52d82ad0c34268 100644 (file)
@@ -213,7 +213,7 @@ fn plugin_deps() {
             extern crate syntax;
 
             use rustc_plugin::Registry;
-            use syntax::ast::TokenTree;
+            use syntax::tokenstream::TokenTree;
             use syntax::codemap::Span;
             use syntax::ext::base::{ExtCtxt, MacEager, MacResult};
 
@@ -295,7 +295,7 @@ fn plugin_to_the_max() {
             extern crate baz;
 
             use rustc_plugin::Registry;
-            use syntax::ast::TokenTree;
+            use syntax::tokenstream::TokenTree;
             use syntax::codemap::Span;
             use syntax::ext::base::{ExtCtxt, MacEager, MacResult};
 
index 14ff5ea9a841d37abada67d4dc405cfa6803b550..c472fae8eef8df253f3b8acc0cbfd3497bebb601 100644 (file)
@@ -16,9 +16,12 @@ use cargo::core::resolver::{self, Method};
 fn resolve<R: Registry>(pkg: PackageId, deps: Vec<Dependency>,
                         registry: &mut R)
                         -> CargoResult<Vec<PackageId>> {
-    let summary = Summary::new(pkg, deps, HashMap::new()).unwrap();
+    let summary = Summary::new(pkg.clone(), deps, HashMap::new()).unwrap();
     let method = Method::Everything;
-    Ok(try!(resolver::resolve(&summary, &method, &[], registry)).iter().map(|p| {
+    Ok(try!(resolver::resolve(&pkg,
+                              &[(summary, method)],
+                              &[],
+                              registry)).iter().map(|p| {
         p.clone()
     }).collect())
 }
diff --git a/tests/workspaces.rs b/tests/workspaces.rs
new file mode 100644 (file)
index 0000000..c25809c
--- /dev/null
@@ -0,0 +1,776 @@
+extern crate cargotest;
+extern crate hamcrest;
+
+use cargotest::support::{project, execs};
+use cargotest::support::registry::Package;
+use hamcrest::{assert_that, existing_file, existing_dir, is_not};
+
+#[test]
+fn simple_explicit() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["bar"]
+        "#)
+        .file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+            workspace = ".."
+        "#)
+        .file("bar/src/main.rs", "fn main() {}");
+    p.build();
+
+    assert_that(p.cargo("build"), execs().with_status(0));
+    assert_that(&p.bin("foo"), existing_file());
+    assert_that(&p.bin("bar"), is_not(existing_file()));
+
+    assert_that(p.cargo("build").cwd(p.root().join("bar")),
+                execs().with_status(0));
+    assert_that(&p.bin("foo"), existing_file());
+    assert_that(&p.bin("bar"), existing_file());
+
+    assert_that(&p.root().join("Cargo.lock"), existing_file());
+    assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file()));
+}
+
+#[test]
+fn inferred_root() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["bar"]
+        "#)
+        .file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+        "#)
+        .file("bar/src/main.rs", "fn main() {}");
+    p.build();
+
+    assert_that(p.cargo("build"), execs().with_status(0));
+    assert_that(&p.bin("foo"), existing_file());
+    assert_that(&p.bin("bar"), is_not(existing_file()));
+
+    assert_that(p.cargo("build").cwd(p.root().join("bar")),
+                execs().with_status(0));
+    assert_that(&p.bin("foo"), existing_file());
+    assert_that(&p.bin("bar"), existing_file());
+
+    assert_that(&p.root().join("Cargo.lock"), existing_file());
+    assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file()));
+}
+
+#[test]
+fn inferred_path_dep() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = { path = "bar" }
+
+            [workspace]
+        "#)
+        .file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+        "#)
+        .file("bar/src/main.rs", "fn main() {}")
+        .file("bar/src/lib.rs", "");
+    p.build();
+
+    assert_that(p.cargo("build"), execs().with_status(0));
+    assert_that(&p.bin("foo"), existing_file());
+    assert_that(&p.bin("bar"), is_not(existing_file()));
+
+    assert_that(p.cargo("build").cwd(p.root().join("bar")),
+                execs().with_status(0));
+    assert_that(&p.bin("foo"), existing_file());
+    assert_that(&p.bin("bar"), existing_file());
+
+    assert_that(&p.root().join("Cargo.lock"), existing_file());
+    assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file()));
+}
+
+#[test]
+fn transitive_path_dep() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = { path = "bar" }
+
+            [workspace]
+        "#)
+        .file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            baz = { path = "../baz" }
+        "#)
+        .file("bar/src/main.rs", "fn main() {}")
+        .file("bar/src/lib.rs", "")
+        .file("baz/Cargo.toml", r#"
+            [project]
+            name = "baz"
+            version = "0.1.0"
+            authors = []
+        "#)
+        .file("baz/src/main.rs", "fn main() {}")
+        .file("baz/src/lib.rs", "");
+    p.build();
+
+    assert_that(p.cargo("build"), execs().with_status(0));
+    assert_that(&p.bin("foo"), existing_file());
+    assert_that(&p.bin("bar"), is_not(existing_file()));
+    assert_that(&p.bin("baz"), is_not(existing_file()));
+
+    assert_that(p.cargo("build").cwd(p.root().join("bar")),
+                execs().with_status(0));
+    assert_that(&p.bin("foo"), existing_file());
+    assert_that(&p.bin("bar"), existing_file());
+    assert_that(&p.bin("baz"), is_not(existing_file()));
+
+    assert_that(p.cargo("build").cwd(p.root().join("baz")),
+                execs().with_status(0));
+    assert_that(&p.bin("foo"), existing_file());
+    assert_that(&p.bin("bar"), existing_file());
+    assert_that(&p.bin("baz"), existing_file());
+
+    assert_that(&p.root().join("Cargo.lock"), existing_file());
+    assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file()));
+    assert_that(&p.root().join("baz/Cargo.lock"), is_not(existing_file()));
+}
+
+#[test]
+fn parent_pointer_works() {
+    let p = project("foo")
+        .file("foo/Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            bar = { path = "../bar" }
+
+            [workspace]
+        "#)
+        .file("foo/src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+            workspace = "../foo"
+        "#)
+        .file("bar/src/main.rs", "fn main() {}")
+        .file("bar/src/lib.rs", "");
+    p.build();
+
+    assert_that(p.cargo("build").cwd(p.root().join("foo")),
+                execs().with_status(0));
+    assert_that(p.cargo("build").cwd(p.root().join("bar")),
+                execs().with_status(0));
+    assert_that(&p.root().join("foo/Cargo.lock"), existing_file());
+    assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file()));
+}
+
+#[test]
+fn same_names_in_workspace() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["bar"]
+        "#)
+        .file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+            workspace = ".."
+        "#)
+        .file("bar/src/main.rs", "fn main() {}");
+    p.build();
+
+    assert_that(p.cargo("build"),
+                execs().with_status(101)
+                       .with_stderr("\
+error: two packages named `foo` in this workspace:
+- [..]Cargo.toml
+- [..]Cargo.toml
+"));
+}
+
+#[test]
+fn parent_doesnt_point_to_child() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+        "#)
+        .file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+        "#)
+        .file("bar/src/main.rs", "fn main() {}");
+    p.build();
+
+    assert_that(p.cargo("build").cwd(p.root().join("bar")),
+                execs().with_status(101)
+                       .with_stderr("\
+error: current package believes it's in a workspace when it's not:
+current: [..]Cargo.toml
+workspace: [..]Cargo.toml
+
+this may be fixable [..]
+"));
+}
+
+#[test]
+fn invalid_parent_pointer() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+            workspace = "foo"
+        "#)
+        .file("src/main.rs", "fn main() {}");
+    p.build();
+
+    assert_that(p.cargo("build"),
+                execs().with_status(101)
+                       .with_stderr("\
+error: failed to read `[..]Cargo.toml`
+
+Caused by:
+  [..]
+"));
+}
+
+#[test]
+fn invalid_members() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["foo"]
+        "#)
+        .file("src/main.rs", "fn main() {}");
+    p.build();
+
+    assert_that(p.cargo("build"),
+                execs().with_status(101)
+                       .with_stderr("\
+error: failed to read `[..]Cargo.toml`
+
+Caused by:
+  [..]
+"));
+}
+
+#[test]
+fn bare_workspace_ok() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+        "#)
+        .file("src/main.rs", "fn main() {}");
+    p.build();
+
+    assert_that(p.cargo("build"), execs().with_status(0));
+}
+
+#[test]
+fn two_roots() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["bar"]
+        "#)
+        .file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = [".."]
+        "#)
+        .file("bar/src/main.rs", "fn main() {}");
+    p.build();
+
+    assert_that(p.cargo("build"),
+                execs().with_status(101)
+                       .with_stderr("\
+error: multiple workspace roots found in the same workspace:
+  [..]
+  [..]
+"));
+}
+
+#[test]
+fn workspace_isnt_root() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+            workspace = "bar"
+        "#)
+        .file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+        "#)
+        .file("bar/src/main.rs", "fn main() {}");
+    p.build();
+
+    assert_that(p.cargo("build"),
+                execs().with_status(101)
+                       .with_stderr("\
+error: root of a workspace inferred but wasn't a root: [..]
+"));
+}
+
+#[test]
+fn dangling_member() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["bar"]
+        "#)
+        .file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+            workspace = "../baz"
+        "#)
+        .file("bar/src/main.rs", "fn main() {}")
+        .file("baz/Cargo.toml", r#"
+            [project]
+            name = "baz"
+            version = "0.1.0"
+            authors = []
+            workspace = "../baz"
+        "#)
+        .file("baz/src/main.rs", "fn main() {}");
+    p.build();
+
+    assert_that(p.cargo("build"),
+                execs().with_status(101)
+                       .with_stderr("\
+error: package `[..]` is a member of the wrong workspace
+expected: [..]
+actual: [..]
+"));
+}
+
+#[test]
+fn cycle() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+            workspace = "bar"
+        "#)
+        .file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+            workspace = ".."
+        "#)
+        .file("bar/src/main.rs", "fn main() {}");
+    p.build();
+
+    assert_that(p.cargo("build"),
+                execs().with_status(101));
+}
+
+#[test]
+fn share_dependencies() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            dep1 = "0.1"
+
+            [workspace]
+            members = ["bar"]
+        "#)
+        .file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            dep1 = "< 0.1.5"
+        "#)
+        .file("bar/src/main.rs", "fn main() {}");
+    p.build();
+
+    Package::new("dep1", "0.1.3").publish();
+    Package::new("dep1", "0.1.8").publish();
+
+    assert_that(p.cargo("build"),
+                execs().with_status(0)
+                       .with_stderr("\
+[UPDATING] registry `[..]`
+[DOWNLOADING] dep1 v0.1.3 ([..])
+[COMPILING] dep1 v0.1.3 ([..])
+[COMPILING] foo v0.1.0 ([..])
+"));
+}
+
+#[test]
+fn fetch_fetches_all() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["bar"]
+        "#)
+        .file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            dep1 = "*"
+        "#)
+        .file("bar/src/main.rs", "fn main() {}");
+    p.build();
+
+    Package::new("dep1", "0.1.3").publish();
+
+    assert_that(p.cargo("fetch"),
+                execs().with_status(0)
+                       .with_stderr("\
+[UPDATING] registry `[..]`
+[DOWNLOADING] dep1 v0.1.3 ([..])
+"));
+}
+
+#[test]
+fn lock_works_for_everyone() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            dep2 = "0.1"
+
+            [workspace]
+            members = ["bar"]
+        "#)
+        .file("src/main.rs", "fn main() {}")
+        .file("bar/Cargo.toml", r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            dep1 = "0.1"
+        "#)
+        .file("bar/src/main.rs", "fn main() {}");
+    p.build();
+
+    Package::new("dep1", "0.1.0").publish();
+    Package::new("dep2", "0.1.0").publish();
+
+    assert_that(p.cargo("generate-lockfile"),
+                execs().with_status(0)
+                       .with_stderr("\
+[UPDATING] registry `[..]`
+"));
+
+    Package::new("dep1", "0.1.1").publish();
+    Package::new("dep2", "0.1.1").publish();
+
+    assert_that(p.cargo("build"),
+                execs().with_status(0)
+                       .with_stderr("\
+[DOWNLOADING] dep2 v0.1.0 ([..])
+[COMPILING] dep2 v0.1.0 ([..])
+[COMPILING] foo v0.1.0 ([..])
+"));
+
+    assert_that(p.cargo("build").cwd(p.root().join("bar")),
+                execs().with_status(0)
+                       .with_stderr("\
+[DOWNLOADING] dep1 v0.1.0 ([..])
+[COMPILING] dep1 v0.1.0 ([..])
+[COMPILING] bar v0.1.0 ([..])
+"));
+}
+
+#[test]
+fn virtual_works() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [workspace]
+            members = ["bar"]
+        "#)
+        .file("bar/Cargo.toml", r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+        "#)
+        .file("bar/src/main.rs", "fn main() {}");
+    p.build();
+    assert_that(p.cargo("build").cwd(p.root().join("bar")),
+                execs().with_status(0));
+    assert_that(&p.root().join("Cargo.lock"), existing_file());
+    assert_that(&p.bin("bar"), existing_file());
+    assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file()));
+}
+
+#[test]
+fn virtual_misconfigure() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [workspace]
+        "#)
+        .file("bar/Cargo.toml", r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+        "#)
+        .file("bar/src/main.rs", "fn main() {}");
+    p.build();
+    assert_that(p.cargo("build").cwd(p.root().join("bar")),
+                execs().with_status(101)
+                       .with_stderr("\
+error: current package believes it's in a workspace when it's not:
+current: [..]bar[..]Cargo.toml
+workspace: [..]Cargo.toml
+
+this may be fixable by adding `bar` to the `workspace.members` array of the \
+manifest located at: [..]
+"));
+}
+
+#[test]
+fn virtual_build() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [workspace]
+        "#)
+        .file("bar/Cargo.toml", r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+        "#)
+        .file("bar/src/main.rs", "fn main() {}");
+    p.build();
+    assert_that(p.cargo("build"),
+                execs().with_status(101)
+                       .with_stderr("\
+error: manifest path `[..]` is a virtual manifest, but this command \
+requires running against an actual package in this workspace
+"));
+}
+
+#[test]
+fn include_virtual() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+            [workspace]
+            members = ["bar"]
+        "#)
+        .file("src/main.rs", "")
+        .file("bar/Cargo.toml", r#"
+            [workspace]
+        "#);
+    p.build();
+    assert_that(p.cargo("build"),
+                execs().with_status(101)
+                       .with_stderr("\
+error: multiple workspace roots found in the same workspace:
+  [..]
+  [..]
+"));
+}
+
+#[test]
+fn members_include_path_deps() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+            members = ["p1"]
+
+            [dependencies]
+            p3 = { path = "p3" }
+        "#)
+        .file("src/lib.rs", "")
+        .file("p1/Cargo.toml", r#"
+            [project]
+            name = "p1"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            p2 = { path = "../p2" }
+        "#)
+        .file("p1/src/lib.rs", "")
+        .file("p2/Cargo.toml", r#"
+            [project]
+            name = "p2"
+            version = "0.1.0"
+            authors = []
+        "#)
+        .file("p2/src/lib.rs", "")
+        .file("p3/Cargo.toml", r#"
+            [project]
+            name = "p3"
+            version = "0.1.0"
+            authors = []
+        "#)
+        .file("p3/src/lib.rs", "");
+    p.build();
+
+    assert_that(p.cargo("build").cwd(p.root().join("p1")),
+                execs().with_status(0));
+    assert_that(p.cargo("build").cwd(p.root().join("p2")),
+                execs().with_status(0));
+    assert_that(p.cargo("build").cwd(p.root().join("p3")),
+                execs().with_status(0));
+    assert_that(p.cargo("build"),
+                execs().with_status(0));
+
+    assert_that(&p.root().join("target"), existing_dir());
+    assert_that(&p.root().join("p1/target"), is_not(existing_dir()));
+    assert_that(&p.root().join("p2/target"), is_not(existing_dir()));
+    assert_that(&p.root().join("p3/target"), is_not(existing_dir()));
+}
+
+#[test]
+fn new_warns_you_this_will_not_work() {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+
+            [workspace]
+        "#)
+        .file("src/lib.rs", "");
+    p.build();
+
+    assert_that(p.cargo("new").arg("bar").env("USER", "foo"),
+                execs().with_status(0)
+                       .with_stderr("\
+warning: compiling this new crate may not work due to invalid workspace \
+configuration
+
+current package believes it's in a workspace when it's not:
+current: [..]
+workspace: [..]
+
+this may be fixable by ensuring that this crate is depended on by the workspace \
+root: [..]
+"));
+}